You are viewing a plain text version of this content; the canonical (HTML) version is available at the original archive link.
Posted to commits@ambari.apache.org by sm...@apache.org on 2015/09/02 18:37:02 UTC

ambari git commit: AMBARI-12969. sys_prepped clusters should not have to copy tarballs again

Repository: ambari
Updated Branches:
  refs/heads/trunk d2e92bbdd -> c3d1306af


AMBARI-12969. sys_prepped clusters should not have to copy tarballs again


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c3d1306a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c3d1306a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c3d1306a

Branch: refs/heads/trunk
Commit: c3d1306af8683ef0a8528c3bdcadc33e012f484c
Parents: d2e92bb
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Wed Sep 2 09:35:42 2015 -0700
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Wed Sep 2 09:35:42 2015 -0700

----------------------------------------------------------------------
 .../libraries/functions/copy_tarball.py             |  9 +++++++--
 .../HIVE/0.12.0.2.0/package/scripts/hive.py         | 13 ++++++++-----
 .../HIVE/0.12.0.2.0/package/scripts/hive_server.py  | 12 ++++++++++--
 .../PIG/0.12.0.2.0/package/scripts/service_check.py |  5 ++++-
 .../1.2.0.2.2/package/scripts/job_history_server.py |  6 +++++-
 .../1.2.0.2.2/package/scripts/spark_service.py      |  2 +-
 .../TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py    |  7 ++++++-
 .../TEZ/0.4.0.2.1/package/scripts/service_check.py  |  2 +-
 .../YARN/2.1.0.2.0/package/scripts/historyserver.py | 16 ++++++++++++----
 .../python/stacks/2.0.6/HIVE/test_hive_server.py    |  8 ++++----
 .../python/stacks/2.0.6/YARN/test_historyserver.py  |  2 +-
 .../python/stacks/2.2/PIG/test_pig_service_check.py |  2 +-
 12 files changed, 60 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index 792f019..badf2fe 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -96,7 +96,7 @@ def _get_single_version_from_hdp_select():
   return hdp_version
 
 def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=None, custom_dest_file=None, force_execute=False,
-                 use_ru_version_during_ru=True, replace_existing_files=False):
+                 use_ru_version_during_ru=True, replace_existing_files=False, host_sys_prepped=False):
   """
   :param name: Tarball name, e.g., tez, hive, pig, sqoop.
   :param user_group: Group to own the directory.
@@ -106,6 +106,7 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non
   :param custom_dest_file: Override the destination file path
   :param force_execute: If true, will execute the HDFS commands immediately, otherwise, will defer to the calling function.
   :param use_ru_version_during_ru: If true, will use the version going to during RU. Otherwise, use the CURRENT (source) version.
+  :param host_sys_prepped: If true, tarballs will not be copied as the cluster deployment uses prepped VMs.
   :return: Will return True if successful, otherwise, False.
   """
   import params
@@ -127,6 +128,10 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non
   if custom_dest_file is not None:
     dest_file = custom_dest_file
 
+  if host_sys_prepped:
+    Logger.info("Skipping copying {0} to {1} for {2} as it's a sys_prepped host.".format(str(source_file), str(dest_file), str(name)))
+    return True
+
   upgrade_direction = default("/commandParams/upgrade_direction", None)
   is_rolling_upgrade = upgrade_direction is not None
   current_version = default("/hostLevelParams/current_version", None)
@@ -200,4 +205,4 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non
   if force_execute:
     params.HdfsResource(None, action="execute")
 
-  return True
\ No newline at end of file
+  return True

http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 1b2b155..7e35a57 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -132,8 +132,8 @@ def hive(name=None):
     # *********************************
     # HDP 2.2 or higher, copy mapreduce.tar.gz to HDFS
     if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >= 0:
-      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user)
-      copy_to_hdfs("tez", params.user_group, params.hdfs_user)
+      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+      copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
 
     # Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
     # This can use a different source and dest location to account for both HDP 2.1 and 2.2
@@ -142,13 +142,15 @@ def hive(name=None):
                  params.hdfs_user,
                  file_mode=params.tarballs_mode,
                  custom_source_file=params.pig_tar_source,
-                 custom_dest_file=params.pig_tar_dest_file)
+                 custom_dest_file=params.pig_tar_dest_file,
+                 host_sys_prepped=params.host_sys_prepped)
     copy_to_hdfs("hive",
                  params.user_group,
                  params.hdfs_user,
                  file_mode=params.tarballs_mode,
                  custom_source_file=params.hive_tar_source,
-                 custom_dest_file=params.hive_tar_dest_file)
+                 custom_dest_file=params.hive_tar_dest_file,
+                 host_sys_prepped=params.host_sys_prepped)
 
     wildcard_tarballs = ["sqoop", "hadoop_streaming"]
     for tarball_name in wildcard_tarballs:
@@ -168,7 +170,8 @@ def hive(name=None):
                      params.hdfs_user,
                      file_mode=params.tarballs_mode,
                      custom_source_file=source_file,
-                     custom_dest_file=dest_file)
+                     custom_dest_file=dest_file,
+                     host_sys_prepped=params.host_sys_prepped)
     # ******* End Copy Tarballs *******
     # *********************************
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index 3ec1747..d7542e9 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -115,8 +115,16 @@ class HiveServerDefault(HiveServer):
       hdp_select.select("hive-server2", params.version)
 
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS
-      resource_created = copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user)
-      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user) or resource_created
+      resource_created = copy_to_hdfs(
+        "mapreduce",
+        params.user_group,
+        params.hdfs_user,
+        host_sys_prepped=params.host_sys_prepped)
+      resource_created = copy_to_hdfs(
+        "tez",
+        params.user_group,
+        params.hdfs_user,
+        host_sys_prepped=params.host_sys_prepped) or resource_created
       if resource_created:
         params.HdfsResource(None, action="execute")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
index 2f8da76..66b9772 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
@@ -95,7 +95,10 @@ class PigServiceCheckLinux(PigServiceCheck):
       )
 
       # Check for Pig-on-Tez
-      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user)
+      resource_created = copy_to_hdfs(
+        "tez", params.user_group,
+        params.hdfs_user,
+        host_sys_prepped=params.host_sys_prepped)
       if resource_created:
         params.HdfsResource(None, action="execute")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
index 8cdafc4..b3999c3 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
@@ -82,7 +82,11 @@ class JobHistoryServer(Script):
       # need to copy the tarball, otherwise, copy it.
 
       if params.version and compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') < 0:
-        resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user)
+        resource_created = copy_to_hdfs(
+          "tez",
+          params.user_group,
+          params.hdfs_user,
+          host_sys_prepped=params.host_sys_prepped)
         if resource_created:
           params.HdfsResource(None, action="execute")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
index 8d758e3..d8b3e66 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
@@ -36,7 +36,7 @@ def spark_service(action):
     # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
     # need to copy the tarball, otherwise, copy it.
     if params.hdp_stack_version and compare_versions(params.hdp_stack_version, '2.3.0.0') < 0:
-      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user)
+      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
       if resource_created:
         params.HdfsResource(None, action="execute")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
index 7731bc7..776d185 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/pre_upgrade.py
@@ -42,7 +42,12 @@ class TezPreUpgrade(Script):
       Logger.info("Stack version {0} is sufficient to check if need to copy tez.tar.gz to HDFS.".format(params.hdp_stack_version))
 
       # Force it to copy the current version of the tez tarball, rather than the version the RU will go to.
-      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user, use_ru_version_during_ru=False)
+      resource_created = copy_to_hdfs(
+        "tez",
+        params.user_group,
+        params.hdfs_user,
+        use_ru_version_during_ru=False,
+        host_sys_prepped=params.host_sys_prepped)
       if resource_created:
         params.HdfsResource(None, action="execute")
       else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
index 9bd366b..e0fa556 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
@@ -63,7 +63,7 @@ class TezServiceCheckLinux(TezServiceCheck):
     )
 
     if params.hdp_stack_version and compare_versions(params.hdp_stack_version, '2.2.0.0') >= 0:
-      copy_to_hdfs("tez", params.user_group, params.hdfs_user)
+      copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
 
     params.HdfsResource(None, action = "execute")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
index 16e34d4..5fe8157 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
@@ -81,8 +81,8 @@ class HistoryServerDefault(HistoryServer):
       conf_select.select(params.stack_name, "hadoop", params.version)
       hdp_select.select("hadoop-mapreduce-historyserver", params.version)
       # MC Hammer said, "Can't touch this"
-      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user)
-      copy_to_hdfs("tez", params.user_group, params.hdfs_user)
+      copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+      copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
       params.HdfsResource(None, action="execute")
 
   def start(self, env, rolling_restart=False):
@@ -92,8 +92,16 @@ class HistoryServerDefault(HistoryServer):
 
     if params.hdp_stack_version_major and compare_versions(params.hdp_stack_version_major, '2.2.0.0') >= 0:
       # MC Hammer said, "Can't touch this"
-      resource_created = copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user)
-      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user) or resource_created
+      resource_created = copy_to_hdfs(
+        "mapreduce",
+        params.user_group,
+        params.hdfs_user,
+        host_sys_prepped=params.host_sys_prepped)
+      resource_created = copy_to_hdfs(
+        "tez",
+        params.user_group,
+        params.hdfs_user,
+        host_sys_prepped=params.host_sys_prepped) or resource_created
       if resource_created:
         params.HdfsResource(None, action="execute")
     else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 42477dd..83328c1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -867,8 +867,8 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
     self.assertResourceCalled('Execute',
                               ('hdp-select', 'set', 'hive-server2', version), sudo=True,)
 
-    copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs")
-    copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs")
+    copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs", host_sys_prepped=False)
+    copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs", host_sys_prepped=False)
     self.assertEquals(2, copy_to_hdfs_mock.call_count)
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,
@@ -905,8 +905,8 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
     self.assertResourceCalled('Execute',
 
                               ('hdp-select', 'set', 'hive-server2', version), sudo=True,)
-    copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs")
-    copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs")
+    copy_to_hdfs_mock.assert_any_call("mapreduce", "hadoop", "hdfs", host_sys_prepped=False)
+    copy_to_hdfs_mock.assert_any_call("tez", "hadoop", "hdfs", host_sys_prepped=False)
     self.assertEquals(2, copy_to_hdfs_mock.call_count)
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,

http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 6cf0f88..45c7978 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -767,7 +767,7 @@ class TestHistoryServer(RMFTestCase):
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute', ('hdp-select', 'set', 'hadoop-mapreduce-historyserver', version), sudo=True)
-    copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs")
+    copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs", host_sys_prepped=False)
 
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,

http://git-wip-us.apache.org/repos/asf/ambari/blob/c3d1306a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
index c4bfa8e..6f83106 100644
--- a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
@@ -125,7 +125,7 @@ class TestPigServiceCheck(RMFTestCase):
         action = ['create_on_execute'],
     )
 
-    copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs")
+    copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs", host_sys_prepped=False)
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = True,
         hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',