Posted to commits@ambari.apache.org by sw...@apache.org on 2017/09/26 22:26:07 UTC

[08/50] [abbrv] ambari git commit: AMBARI-22030. OOZIE/OOZIE_SERVER restart failed saying AttributeError: 'NoneType' object has no attribute 'upper' during EU [accidental commit] (ncole)

AMBARI-22030. OOZIE/OOZIE_SERVER restart failed saying AttributeError: 'NoneType' object has no attribute 'upper' during EU [accidental commit] (ncole)
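
For reference, the AttributeError quoted in the JIRA title is Python's standard failure when .upper() is called on a value that is None, typically because a lookup returned no result. A minimal, hypothetical sketch of that failure mode follows; the map, key, and guard are illustrative only, not Ambari's actual code:

    # Hypothetical illustration of the error named in AMBARI-22030.
    tarball_map = {"oozie": None}        # a lookup whose value is unexpectedly None

    value = tarball_map.get("oozie")     # returns None here
    if value is not None:                # guard that avoids the traceback
        print(value.upper())
    # Without the guard, value.upper() raises:
    #   AttributeError: 'NoneType' object has no attribute 'upper'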


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5d5f9749
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5d5f9749
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5d5f9749

Branch: refs/heads/branch-3.0-ams
Commit: 5d5f9749e82bf815a1c8f82f090b769421d39949
Parents: 7c687bb
Author: Nate Cole <nc...@hortonworks.com>
Authored: Thu Sep 21 18:07:57 2017 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Thu Sep 21 18:07:57 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/copy_tarball.py.rej     | 78 --------------------
 1 file changed, 78 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5d5f9749/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py.rej
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py.rej b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py.rej
deleted file mode 100644
index b2c11fc..0000000
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py.rej
+++ /dev/null
@@ -1,78 +0,0 @@
-diff a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py	(rejected hunks)
-@@ -41,26 +41,65 @@ STACK_VERSION_PATTERN = "{{ stack_version }}"
- # especially since it is an attribute of a stack and becomes
- # complicated to change during a Rolling/Express upgrade.
- TARBALL_MAP = {
--  "slider": ("{0}/{1}/slider/lib/slider.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-+  "slider": {
-+    "dirs": ("{0}/{1}/slider/lib/slider.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-              "/{0}/apps/{1}/slider/slider.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
--  "tez": ("{0}/{1}/tez/lib/tez.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-+    "service": "SLIDER"
-+  },
-+
-+  "tez": {
-+    "dirs": ("{0}/{1}/tez/lib/tez.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-           "/{0}/apps/{1}/tez/tez.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
--  "tez_hive2": ("{0}/{1}/tez_hive2/lib/tez.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-+    "service": "TEZ"
-+  },
-+
-+  "tez_hive2": {
-+    "dirs": ("{0}/{1}/tez_hive2/lib/tez.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-           "/{0}/apps/{1}/tez_hive2/tez.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
--  "hive": ("{0}/{1}/hive/hive.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-+    "service": "HIVE"
-+  },
-+
-+  "hive": {
-+    "dirs": ("{0}/{1}/hive/hive.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-            "/{0}/apps/{1}/hive/hive.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
--  "pig": ("{0}/{1}/pig/pig.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-+    "service": "HIVE"
-+  },
-+
-+  "pig": {
-+    "dirs": ("{0}/{1}/pig/pig.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-           "/{0}/apps/{1}/pig/pig.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
--  "hadoop_streaming": ("{0}/{1}/hadoop-mapreduce/hadoop-streaming.jar".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-+    "service": "PIG"
-+  },
-+
-+  "hadoop_streaming": {
-+    "dirs": ("{0}/{1}/hadoop-mapreduce/hadoop-streaming.jar".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-                        "/{0}/apps/{1}/mapreduce/hadoop-streaming.jar".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
--  "sqoop": ("{0}/{1}/sqoop/sqoop.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-+    "service": "MAPREDUCE2"
-+  },
-+
-+  "sqoop": {
-+    "dirs": ("{0}/{1}/sqoop/sqoop.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-             "/{0}/apps/{1}/sqoop/sqoop.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
--  "mapreduce": ("{0}/{1}/hadoop/mapreduce.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-+    "service": "SQOOP"
-+  },
-+
-+  "mapreduce": {
-+    "dirs": ("{0}/{1}/hadoop/mapreduce.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
-                 "/{0}/apps/{1}/mapreduce/mapreduce.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
--  "spark": ("{0}/{1}/spark/lib/spark-{2}-assembly.jar".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN, STACK_NAME_PATTERN),
-+    "service": "MAPREDUCE2"
-+  },
-+
-+  "spark": {
-+    "dirs": ("{0}/{1}/spark/lib/spark-{2}-assembly.jar".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN, STACK_NAME_PATTERN),
-             "/{0}/apps/{1}/spark/spark-{0}-assembly.jar".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
--  "spark2": ("/tmp/spark2/spark2-{0}-yarn-archive.tar.gz".format(STACK_NAME_PATTERN),
--             "/{0}/apps/{1}/spark2/spark2-{0}-yarn-archive.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN))
-+    "service": "SPARK"
-+  },
-+
-+  "spark2": {
-+    "dirs": ("/tmp/spark2/spark2-{0}-yarn-archive.tar.gz".format(STACK_NAME_PATTERN),
-+             "/{0}/apps/{1}/spark2/spark2-{0}-yarn-archive.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
-+    "service": "SPARK2"
-+  }
- }
- 
-
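
The rejected hunk above records a restructuring of TARBALL_MAP: the old layout mapped each tarball key to a bare (source, destination) tuple, while the new layout wraps that tuple in a dict under "dirs" and adds the owning "service" name. A minimal sketch of the two shapes, with simplified placeholder strings standing in for the STACK_*_PATTERN format calls and a hypothetical lookup helper showing how the extra key would be read:

    # Old shape: tarball name -> (source path, destination path)
    OLD_TARBALL_MAP = {
        "tez": ("{stack_root}/{stack_version}/tez/lib/tez.tar.gz",
                "/{stack_name}/apps/{stack_version}/tez/tez.tar.gz"),
    }

    # New shape: tarball name -> {"dirs": (source, destination), "service": NAME}
    NEW_TARBALL_MAP = {
        "tez": {
            "dirs": ("{stack_root}/{stack_version}/tez/lib/tez.tar.gz",
                     "/{stack_name}/apps/{stack_version}/tez/tez.tar.gz"),
            "service": "TEZ",
        },
    }

    def tarball_entry(name):
        # Hypothetical helper (not part of copy_tarball.py): return
        # (source, destination, service) for a tarball key, returning None
        # for unknown keys instead of letting a missing entry propagate
        # into later attribute errors.
        entry = NEW_TARBALL_MAP.get(name)
        if entry is None:
            return None
        source, destination = entry["dirs"]
        return source, destination, entry["service"]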