You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by nc...@apache.org on 2016/02/18 14:33:45 UTC
[26/33] ambari git commit: AMBARI-15079. Preupload.py should
pre-create hdfs directories (aonishuk)
AMBARI-15079. Preupload.py should pre-create hdfs directories (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/df2dbe15
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/df2dbe15
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/df2dbe15
Branch: refs/heads/branch-dev-patch-upgrade
Commit: df2dbe150b97ecb74ddf4652463b8062f288203a
Parents: b15c0f3
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Feb 18 00:50:21 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Feb 18 00:50:48 2016 +0200
----------------------------------------------------------------------
.../libraries/providers/hdfs_resource.py | 63 +++++++++++++-------
.../libraries/resources/hdfs_resource.py | 12 ++++
.../1.6.1.2.2.0/package/scripts/params.py | 1 +
.../0.1.0/package/scripts/params.py | 1 +
.../0.5.0.2.1/package/scripts/params_linux.py | 1 +
.../0.96.0.2.0/package/scripts/params_linux.py | 1 +
.../2.1.0.2.0/package/scripts/params_linux.py | 1 +
.../0.12.0.2.0/package/scripts/params_linux.py | 1 +
.../KAFKA/0.8.1.2.2/package/scripts/params.py | 1 +
.../0.5.0.2.2/package/scripts/params_linux.py | 1 +
.../MAHOUT/1.0.0.2.3/package/scripts/params.py | 1 +
.../4.0.0.2.0/package/scripts/params_linux.py | 1 +
.../0.12.0.2.0/package/scripts/params_linux.py | 1 +
.../0.60.0.2.2/package/scripts/params_linux.py | 1 +
.../SPARK/1.2.0.2.2/package/scripts/params.py | 1 +
.../0.9.1.2.1/package/scripts/params_linux.py | 1 +
.../0.4.0.2.1/package/scripts/params_linux.py | 1 +
.../2.1.0.2.0/package/scripts/params_linux.py | 1 +
.../main/resources/scripts/Ambaripreupload.py | 54 ++++++++++++++---
.../2.0.6/hooks/before-START/scripts/params.py | 1 +
.../AMBARI_METRICS/test_metrics_collector.py | 6 +-
.../stacks/2.0.6/HBASE/test_hbase_master.py | 18 +++---
.../python/stacks/2.0.6/HDFS/test_namenode.py | 54 ++++++++---------
.../stacks/2.0.6/HDFS/test_service_check.py | 8 +--
.../stacks/2.0.6/HIVE/test_hive_server.py | 28 ++++-----
.../2.0.6/HIVE/test_hive_service_check.py | 12 ++--
.../stacks/2.0.6/OOZIE/test_oozie_server.py | 32 +++++-----
.../stacks/2.0.6/OOZIE/test_service_check.py | 10 ++--
.../stacks/2.0.6/PIG/test_pig_service_check.py | 12 ++--
.../stacks/2.0.6/YARN/test_historyserver.py | 29 +++++----
.../2.0.6/YARN/test_mapreduce2_service_check.py | 12 ++--
.../stacks/2.1/FALCON/test_falcon_server.py | 12 ++--
.../python/stacks/2.1/TEZ/test_service_check.py | 16 ++---
.../stacks/2.1/YARN/test_apptimelineserver.py | 2 +-
.../stacks/2.2/PIG/test_pig_service_check.py | 12 ++--
.../stacks/2.2/SPARK/test_job_history_server.py | 11 ++--
.../2.3/MAHOUT/test_mahout_service_check.py | 8 +--
.../2.3/SPARK/test_spark_thrift_server.py | 4 +-
.../test/python/stacks/2.3/YARN/test_ats_1_5.py | 10 ++--
39 files changed, 267 insertions(+), 175 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
index ebcf1a4..b73ae56 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
@@ -151,32 +151,13 @@ class WebHDFSUtil:
# only hdfs seems to support webHDFS
return (is_webhdfs_enabled and default_fs.startswith("hdfs"))
- def parse_path(self, path):
- """
- hdfs://nn_url:1234/a/b/c -> /a/b/c
- hdfs://nn_ha_name/a/b/c -> /a/b/c
- hdfs:///a/b/c -> /a/b/c
- /a/b/c -> /a/b/c
- """
- math_with_protocol_and_nn_url = re.match("[a-zA-Z]+://[^/]+(/.+)", path)
- math_with_protocol = re.match("[a-zA-Z]+://(/.+)", path)
-
- if math_with_protocol_and_nn_url:
- path = math_with_protocol_and_nn_url.group(1)
- elif math_with_protocol:
- path = math_with_protocol.group(1)
- else:
- path = path
-
- return re.sub("[/]+", "/", path)
-
valid_status_codes = ["200", "201"]
def run_command(self, target, operation, method='POST', assertable_result=True, file_to_put=None, ignore_status_codes=[], **kwargs):
"""
assertable_result - some POST requests return '{"boolean":false}' or '{"boolean":true}'
depending on if query was successful or not, we can assert this for them
"""
- target = self.parse_path(target)
+ target = HdfsResourceProvider.parse_path(target)
url = format("{address}/webhdfs/v1{target}?op={operation}&user.name={run_user}", address=self.address, run_user=self.run_user)
for k,v in kwargs.iteritems():
@@ -394,7 +375,7 @@ class HdfsResourceWebHDFS:
def _fill_in_parent_directories(self, target, results):
- path_parts = self.util.parse_path(target).split("/")[1:]# [1:] remove '' from parts
+ path_parts = HdfsResourceProvider.parse_path(target).split("/")[1:]# [1:] remove '' from parts
path = "/"
for path_part in path_parts:
@@ -416,13 +397,51 @@ class HdfsResourceProvider(Provider):
def __init__(self, resource):
super(HdfsResourceProvider,self).__init__(resource)
self.fsType = getattr(resource, 'dfs_type')
+ self.ignored_resources_list = self.get_ignored_resources_list()
if self.fsType != 'HCFS':
self.assert_parameter_is_set('hdfs_site')
self.webhdfs_enabled = self.resource.hdfs_site['dfs.webhdfs.enabled']
+
+ @staticmethod
+ def parse_path(path):
+ """
+ hdfs://nn_url:1234/a/b/c -> /a/b/c
+ hdfs://nn_ha_name/a/b/c -> /a/b/c
+ hdfs:///a/b/c -> /a/b/c
+ /a/b/c -> /a/b/c
+ """
+ math_with_protocol_and_nn_url = re.match("[a-zA-Z]+://[^/]+(/.+)", path)
+ math_with_protocol = re.match("[a-zA-Z]+://(/.+)", path)
+
+ if math_with_protocol_and_nn_url:
+ path = math_with_protocol_and_nn_url.group(1)
+ elif math_with_protocol:
+ path = math_with_protocol.group(1)
+ else:
+ path = path
+
+ return re.sub("[/]+", "/", path)
+
+ def get_ignored_resources_list(self):
+ if not self.resource.hdfs_resource_ignore_file or not os.path.exists(self.resource.hdfs_resource_ignore_file):
+ return []
+
+ with open(self.resource.hdfs_resource_ignore_file, "rb") as fp:
+ content = fp.read()
+
+ hdfs_resources_to_ignore = []
+ for hdfs_resource_to_ignore in content.split("\n"):
+ hdfs_resources_to_ignore.append(HdfsResourceProvider.parse_path(hdfs_resource_to_ignore))
+
+ return hdfs_resources_to_ignore
def action_delayed(self, action_name):
self.assert_parameter_is_set('type')
-
+
+ if HdfsResourceProvider.parse_path(self.resource.target) in self.ignored_resources_list:
+ Logger.info("Skipping '{0}' because it is in ignore file {1}.".format(self.resource, self.resource.hdfs_resource_ignore_file))
+ return
+
self.get_hdfs_resource_executor().action_delayed(action_name, self)
def action_create_on_execute(self):
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
index 7c12409..c5460a0 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
@@ -76,6 +76,18 @@ class HdfsResource(Resource):
hadoop_bin_dir = ResourceArgument()
hadoop_conf_dir = ResourceArgument()
+ """
+ Path to file which contains '\n'-separated list of hdfs resources, which should not
+ be managed. (simply any action to be skipped on it)
+
+ This means that if HdfsResource('/test1'..) is executed and /test1 is one of the lines
+ in the given file, the execution will be ignored.
+
+ Example value:
+ /var/lib/ambari-agent/data/.hdfs_resource_ignore
+ """
+ hdfs_resource_ignore_file = ResourceArgument()
+
# WebHDFS needs these
hdfs_site = ResourceArgument()
default_fs = ResourceArgument()
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 2bd2626..993d4cf 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -186,6 +186,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index f3a97fc..f1ff998 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -265,6 +265,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
index b150464..6b37451 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
@@ -119,6 +119,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index 6837bf1..03486a7 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -223,6 +223,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 29c4784..3fb4486 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -325,6 +325,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index dc17dba..e9500d9 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -443,6 +443,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user = hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
index 47af240..d629533 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
@@ -274,6 +274,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index 7ec85b5..c42c123 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -343,6 +343,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index 69c03ea..b1667a8 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -82,6 +82,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 7a2f6f6..81c894a 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -265,6 +265,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
index f923723..fc6fd81 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
@@ -83,6 +83,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
index 09b7876..132ff77 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
@@ -64,6 +64,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index 68c4f37..7bf1f1c 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -188,6 +188,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
index 25da2a1..33615ba 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
@@ -295,6 +295,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 25f867e..da52556 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -87,6 +87,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index f0b6927..2ef404d 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -268,6 +268,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/scripts/Ambaripreupload.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index 61db286..a803de4 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -166,7 +166,8 @@ with Environment() as env:
hadoop_conf_dir = hadoop_conf_dir,
principal_name = None,
hdfs_site = hdfs_site,
- default_fs = fs_default
+ default_fs = fs_default,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
)
def _copy_files(source_and_dest_pairs, file_owner, group_owner, kinit_if_needed):
@@ -231,7 +232,40 @@ with Environment() as env:
source_and_dest_pairs = [(component_tar_source_file, destination_file), ]
return _copy_files(source_and_dest_pairs, file_owner, group_owner, kinit_if_needed)
+
+ def createHdfsResources():
+ params.HdfsResource('/atshistory', user='hdfs', change_permissions_for_parents=True, owner='yarn', group='hadoop', type='directory', action= ['create_on_execute'], mode=0755)
+ params.HdfsResource('/user/hcat', owner='hcat', type='directory', action=['create_on_execute'], mode=0755)
+ params.HdfsResource('/hive/warehouse', owner='hive', type='directory', action=['create_on_execute'], mode=0777)
+ params.HdfsResource('/user/hive', owner='hive', type='directory', action=['create_on_execute'], mode=0755)
+ params.HdfsResource('/tmp', mode=0777, action=['create_on_execute'], type='directory', owner='hdfs')
+ params.HdfsResource('/user/ambari-qa', type='directory', action=['create_on_execute'], mode=0770)
+ params.HdfsResource('/user/oozie', owner='oozie', type='directory', action=['create_on_execute'], mode=0775)
+ params.HdfsResource('/app-logs', recursive_chmod=True, owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+ params.HdfsResource('/tmp/entity-file-history/active', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'])
+ params.HdfsResource('/mapred', owner='mapred', type='directory', action=['create_on_execute'])
+ params.HdfsResource('/mapred/system', owner='hdfs', type='directory', action=['create_on_execute'])
+ params.HdfsResource('/mr-history/done', change_permissions_for_parents=True, owner='mapred', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+ params.HdfsResource('/atshistory/done', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0700)
+ params.HdfsResource('/atshistory/active', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=01777)
+ params.HdfsResource('/ams/hbase', owner='ams', type='directory', action=['create_on_execute'], mode=0775)
+ params.HdfsResource('/amshbase/staging', owner='ams', type='directory', action=['create_on_execute'], mode=0711)
+ params.HdfsResource('/user/ams/hbase', owner='ams', type='directory', action=['create_on_execute'], mode=0775)
+
+ def putCreatedHdfsResourcesToIgnore(env):
+ if not 'hdfs_files' in env.config:
+ Logger.info("Not creating .hdfs_resource_ignore as no resources to use.")
+ return
+
+ file_content = ""
+ for file in env.config['hdfs_files']:
+ file_content += file['target']
+ file_content += "\n"
+
+ with open("/var/lib/ambari-agent/data/.hdfs_resource_ignore", "a+") as fp:
+ fp.write(file_content)
+
env.set_params(params)
hadoop_conf_dir = params.hadoop_conf_dir
@@ -272,7 +306,7 @@ with Environment() as env:
# DON'T CHANGE THE VALUE SINCE IT'S USED TO DETERMINE WHETHER TO RUN THE COMMAND OR NOT BY READING THE MARKER FILE.
# Oozie tmp dir should be /var/tmp/oozie and is already created by a function above.
command = format("cd {oozie_tmp_dir} && {oozie_setup_sh} prepare-war {oozie_secure} ")
- command_to_file = format("cd {oozie_tmp_dir} && {oozie_setup_sh_current} prepare-war {oozie_secure} ")
+ command_to_file = format("cd {oozie_tmp_dir} && {oozie_setup_sh_current} prepare-war {oozie_secure} ").strip()
run_prepare_war = False
if os.path.exists(prepare_war_cmd_file):
@@ -338,7 +372,9 @@ with Environment() as env:
copy_tarballs_to_hdfs(format("/usr/hdp/{hdp_version}/pig/pig.tar.gz"), hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/pig/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
copy_tarballs_to_hdfs(format("/usr/hdp/{hdp_version}/hadoop-mapreduce/hadoop-streaming.jar"), hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/mapreduce/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
copy_tarballs_to_hdfs(format("/usr/hdp/{hdp_version}/sqoop/sqoop.tar.gz"), hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/sqoop/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
-
+ print "Creating hdfs directories..."
+ createHdfsResources()
+ putCreatedHdfsResourcesToIgnore(env)
# jar shouldn't be used before (read comment below)
File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
@@ -347,8 +383,12 @@ with Environment() as env:
)
# Create everything in one jar call (this is fast).
# (! Before everything should be executed with action="create_on_execute/delete_on_execute" for this time-optimization to work)
- params.HdfsResource(None,
- logoutput=True,
- action="execute"
- )
+ try:
+ params.HdfsResource(None,
+ logoutput=True,
+ action="execute"
+ )
+ except:
+ os.remove("/var/lib/ambari-agent/data/.hdfs_resource_ignore")
+ raise
print "Completed tarball copy. Ambari preupload script completed."
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 2a9d7c5..3570e45 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -240,6 +240,7 @@ import functools
HdfsResource = functools.partial(
HdfsResource,
user=hdfs_user,
+ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
security_enabled = security_enabled,
keytab = hdfs_user_keytab,
kinit_path_local = kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
index 96e2286..64b16c6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
@@ -311,7 +311,7 @@ class TestMetricsCollector(RMFTestCase):
mode = 0775,
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hdfs_site=self.getConfig()['configurations']['hdfs-site'],
principal_name=UnknownConfigurationMock(),
default_fs='hdfs://c6401.ambari.apache.org:8020',
@@ -327,7 +327,7 @@ class TestMetricsCollector(RMFTestCase):
mode = 0711,
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hdfs_site=self.getConfig()['configurations']['hdfs-site'],
principal_name=UnknownConfigurationMock(),
default_fs='hdfs://c6401.ambari.apache.org:8020',
@@ -339,7 +339,7 @@ class TestMetricsCollector(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
hadoop_conf_dir = '/etc/hadoop/conf',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hdfs_site=self.getConfig()['configurations']['hdfs-site'],
principal_name=UnknownConfigurationMock(),
default_fs='hdfs://c6401.ambari.apache.org:8020',
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index 13b2e33..d8cecec 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -340,7 +340,7 @@ class TestHBaseMaster(RMFTestCase):
owner = 'hbase',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
)
self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
security_enabled = False,
@@ -353,7 +353,7 @@ class TestHBaseMaster(RMFTestCase):
owner = 'hbase',
hadoop_bin_dir = '/usr/bin',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0711,
)
self.assertResourceCalled('HdfsResource', None,
@@ -364,7 +364,7 @@ class TestHBaseMaster(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
)
@@ -478,7 +478,7 @@ class TestHBaseMaster(RMFTestCase):
owner = 'hbase',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
)
self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
security_enabled = True,
@@ -491,7 +491,7 @@ class TestHBaseMaster(RMFTestCase):
owner = 'hbase',
hadoop_bin_dir = '/usr/bin',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0711,
)
self.assertResourceCalled('HdfsResource', None,
@@ -502,7 +502,7 @@ class TestHBaseMaster(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
)
@@ -627,7 +627,7 @@ class TestHBaseMaster(RMFTestCase):
owner = 'hbase',
hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
security_enabled = False,
@@ -642,7 +642,7 @@ class TestHBaseMaster(RMFTestCase):
owner = 'hbase',
hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0711,
)
self.assertResourceCalled('HdfsResource', None,
@@ -655,7 +655,7 @@ class TestHBaseMaster(RMFTestCase):
principal_name = UnknownConfigurationMock(),
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 39244ff..fc22d08 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -108,7 +108,7 @@ class TestNamenode(RMFTestCase):
dfs_type = '',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -125,7 +125,7 @@ class TestNamenode(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
@@ -139,7 +139,7 @@ class TestNamenode(RMFTestCase):
principal_name = None,
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
@@ -221,7 +221,7 @@ class TestNamenode(RMFTestCase):
dfs_type = '',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -238,7 +238,7 @@ class TestNamenode(RMFTestCase):
dfs_type = '',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
@@ -252,7 +252,7 @@ class TestNamenode(RMFTestCase):
principal_name = None,
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
@@ -346,7 +346,7 @@ class TestNamenode(RMFTestCase):
dfs_type = '',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0777,
only_if = True
)
@@ -360,7 +360,7 @@ class TestNamenode(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0770,
only_if = True
)
@@ -372,7 +372,7 @@ class TestNamenode(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
@@ -446,7 +446,7 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -463,7 +463,7 @@ class TestNamenode(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
@@ -477,7 +477,7 @@ class TestNamenode(RMFTestCase):
principal_name = None,
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
@@ -543,7 +543,7 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -560,7 +560,7 @@ class TestNamenode(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
@@ -574,7 +574,7 @@ class TestNamenode(RMFTestCase):
principal_name = None,
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
@@ -646,7 +646,7 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -663,7 +663,7 @@ class TestNamenode(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
@@ -677,7 +677,7 @@ class TestNamenode(RMFTestCase):
principal_name = 'hdfs',
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
@@ -749,7 +749,7 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -766,7 +766,7 @@ class TestNamenode(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
@@ -780,7 +780,7 @@ class TestNamenode(RMFTestCase):
principal_name = None,
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
@@ -851,7 +851,7 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -868,7 +868,7 @@ class TestNamenode(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
@@ -882,7 +882,7 @@ class TestNamenode(RMFTestCase):
principal_name = None,
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
@@ -961,7 +961,7 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -978,7 +978,7 @@ class TestNamenode(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0770,
)
self.assertResourceCalled('HdfsResource', None,
@@ -992,7 +992,7 @@ class TestNamenode(RMFTestCase):
principal_name = None,
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
index 0f5afa8..851aee2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
@@ -69,7 +69,7 @@ class TestServiceCheck(RMFTestCase):
principal_name = None,
user = 'hdfs',
dfs_type = '',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
mode = 0777,
@@ -84,7 +84,7 @@ class TestServiceCheck(RMFTestCase):
principal_name = None,
user = 'hdfs',
dfs_type = '',
- action = ['delete_on_execute'],
+ action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'file',
)
@@ -99,7 +99,7 @@ class TestServiceCheck(RMFTestCase):
principal_name = None,
user = 'hdfs',
dfs_type = '',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'file',
)
@@ -113,7 +113,7 @@ class TestServiceCheck(RMFTestCase):
principal_name = None,
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 494d16c..9929d00 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -340,7 +340,7 @@ class TestHiveServer(RMFTestCase):
owner = 'hcat',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
mode = 0755,
)
self.assertResourceCalled('HdfsResource', '/user/hcat',
@@ -353,7 +353,7 @@ class TestHiveServer(RMFTestCase):
owner = 'hcat',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
mode = 0755,
)
@@ -371,7 +371,7 @@ class TestHiveServer(RMFTestCase):
owner = 'hive',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/hive',
@@ -384,7 +384,7 @@ class TestHiveServer(RMFTestCase):
owner = 'hive',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
mode = 0755,
)
if not no_tmp:
@@ -399,7 +399,7 @@ class TestHiveServer(RMFTestCase):
group = 'hdfs',
hadoop_bin_dir = '/usr/bin',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
mode = 0777,
)
self.assertResourceCalled('HdfsResource', None,
@@ -409,7 +409,7 @@ class TestHiveServer(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Directory', '/etc/hive',
@@ -528,7 +528,7 @@ class TestHiveServer(RMFTestCase):
owner = 'hcat',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0755,
)
self.assertResourceCalled('HdfsResource', '/user/hcat',
@@ -541,7 +541,7 @@ class TestHiveServer(RMFTestCase):
owner = 'hcat',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0755,
)
@@ -555,7 +555,7 @@ class TestHiveServer(RMFTestCase):
owner = 'hive',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', '/user/hive',
@@ -568,7 +568,7 @@ class TestHiveServer(RMFTestCase):
owner = 'hive',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0755,
)
self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
@@ -582,7 +582,7 @@ class TestHiveServer(RMFTestCase):
group = 'hdfs',
hadoop_bin_dir = '/usr/bin',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0777,
)
self.assertResourceCalled('HdfsResource', None,
@@ -592,7 +592,7 @@ class TestHiveServer(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Directory', '/etc/hive',
@@ -926,7 +926,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
)
self.assertNoMoreResources()
@@ -968,7 +968,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
)
self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index ea17c27..589b437 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -100,7 +100,7 @@ class TestServiceCheck(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'file',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.in',
security_enabled = False,
@@ -116,7 +116,7 @@ class TestServiceCheck(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'file',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
self.assertResourceCalled('HdfsResource', None,
security_enabled = False,
@@ -128,7 +128,7 @@ class TestServiceCheck(RMFTestCase):
principal_name = 'missing_principal',
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 idtest.ambari-qa.1431110511.43.pig no_keytab false /usr/bin/kinit no_principal',
@@ -212,7 +212,7 @@ class TestServiceCheck(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'file',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.in',
security_enabled = True,
@@ -228,7 +228,7 @@ class TestServiceCheck(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'file',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
self.assertResourceCalled('HdfsResource', None,
security_enabled = True,
@@ -240,7 +240,7 @@ class TestServiceCheck(RMFTestCase):
principal_name = 'hdfs',
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 idtest.ambari-qa.1431110511.43.pig /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit ambari-qa@EXAMPLE.COM',
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index ba1b84a..0683551 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -73,7 +73,7 @@ class TestOozieServer(RMFTestCase):
owner = 'oozie',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0775,
)
self.assertResourceCalled('HdfsResource', None,
@@ -83,7 +83,7 @@ class TestOozieServer(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Directory', '/etc/oozie/conf',
@@ -291,7 +291,7 @@ class TestOozieServer(RMFTestCase):
owner = 'oozie',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0775,
)
self.assertResourceCalled('HdfsResource', None,
@@ -304,7 +304,7 @@ class TestOozieServer(RMFTestCase):
principal_name = UnknownConfigurationMock(),
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Directory', '/etc/oozie/conf',
@@ -521,7 +521,7 @@ class TestOozieServer(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
principal_name = UnknownConfigurationMock(),
recursive_chmod = True,
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
mode = 0755,
@@ -536,7 +536,7 @@ class TestOozieServer(RMFTestCase):
principal_name = UnknownConfigurationMock(),
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-start.sh',
@@ -616,7 +616,7 @@ class TestOozieServer(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
principal_name = 'hdfs',
recursive_chmod = True,
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
mode = 0755,
@@ -631,7 +631,7 @@ class TestOozieServer(RMFTestCase):
principal_name = 'hdfs',
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-start.sh',
@@ -670,7 +670,7 @@ class TestOozieServer(RMFTestCase):
owner = 'oozie',
hadoop_bin_dir = '/usr/bin',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0775,
)
self.assertResourceCalled('HdfsResource', None,
@@ -680,7 +680,7 @@ class TestOozieServer(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Directory', '/etc/oozie/conf',
@@ -858,7 +858,7 @@ class TestOozieServer(RMFTestCase):
owner = 'oozie',
hadoop_bin_dir = '/usr/bin',
type = 'directory',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
mode = 0775,
)
self.assertResourceCalled('HdfsResource', None,
@@ -869,7 +869,7 @@ class TestOozieServer(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Directory', '/etc/oozie/conf',
@@ -1406,7 +1406,7 @@ class TestOozieServer(RMFTestCase):
group = 'hadoop',
hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0755 )
self.assertResourceCalled('HdfsResource', None,
@@ -1419,7 +1419,7 @@ class TestOozieServer(RMFTestCase):
principal_name = UnknownConfigurationMock(),
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf' )
self.assertResourceCalled('Execute', '/usr/hdp/2.3.0.0-1234/oozie/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020',
@@ -1479,7 +1479,7 @@ class TestOozieServer(RMFTestCase):
group = 'hadoop',
hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf',
type = 'directory',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
mode = 0755 )
self.assertResourceCalled('HdfsResource', None,
@@ -1492,7 +1492,7 @@ class TestOozieServer(RMFTestCase):
principal_name = UnknownConfigurationMock(),
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf' )
self.assertResourceCalled('Execute', '/usr/hdp/2.3.0.0-1234/oozie/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020',
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
index a6d0145..dc35612 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
@@ -89,7 +89,7 @@ class TestServiceCheck(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
)
@@ -101,7 +101,7 @@ class TestServiceCheck(RMFTestCase):
source = '//examples',
user = 'hdfs',
dfs_type = '',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
owner = 'ambari-qa',
@@ -114,7 +114,7 @@ class TestServiceCheck(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
)
@@ -126,7 +126,7 @@ class TestServiceCheck(RMFTestCase):
source = '//examples/input-data',
user = 'hdfs',
dfs_type = '',
- action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
owner = 'ambari-qa',
@@ -139,7 +139,7 @@ class TestServiceCheck(RMFTestCase):
kinit_path_local = '/usr/bin/kinit',
user = 'hdfs',
dfs_type = '',
- action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Execute', '/tmp/oozieSmoke2.sh suse /var/lib/oozie /etc/oozie/conf /usr/bin http://c6402.ambari.apache.org:11000/oozie / /etc/hadoop/conf /usr/bin ambari-qa False',
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
index c5de4c3..9c0b035 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
@@ -45,7 +45,7 @@ class TestPigServiceCheck(RMFTestCase):
hadoop_conf_dir = '/etc/hadoop/conf',
dfs_type = '',
type = 'directory',
- action = ['delete_on_execute'],
+ action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
security_enabled = False,
@@ -61,7 +61,7 @@ class TestPigServiceCheck(RMFTestCase):
hadoop_conf_dir = '/etc/hadoop/conf',
dfs_type = '',
type = 'file',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
self.assertResourceCalled('HdfsResource', None,
security_enabled = False,
@@ -73,7 +73,7 @@ class TestPigServiceCheck(RMFTestCase):
principal_name = UnknownConfigurationMock(),
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
@@ -119,7 +119,7 @@ class TestPigServiceCheck(RMFTestCase):
owner = 'ambari-qa',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'directory',
- action = ['delete_on_execute'],
+ action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
security_enabled = True,
@@ -135,7 +135,7 @@ class TestPigServiceCheck(RMFTestCase):
dfs_type = '',
hadoop_conf_dir = '/etc/hadoop/conf',
type = 'file',
- action = ['create_on_execute'],
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
)
self.assertResourceCalled('HdfsResource', None,
security_enabled = True,
@@ -147,7 +147,7 @@ class TestPigServiceCheck(RMFTestCase):
principal_name = 'hdfs',
user = 'hdfs',
dfs_type = '',
- action = ['execute'],
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hadoop_conf_dir = '/etc/hadoop/conf',
)
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;',