Posted to commits@ambari.apache.org by ao...@apache.org on 2018/05/29 11:11:54 UTC
[ambari] branch trunk updated: AMBARI-23943. Hiveserver2 fails to start on viewFS enabled cluster (#1393)
This is an automated email from the ASF dual-hosted git repository.
aonishuk pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git
The following commit(s) were added to refs/heads/trunk by this push:
new 70f95fa AMBARI-23943. Hiveserver2 fails to start on viewFS enabled cluster (#1393)
70f95fa is described below
commit 70f95fa544211d7e87b8910e8f099540f02f491b
Author: aonishuk <ao...@hortonworks.com>
AuthorDate: Tue May 29 14:11:38 2018 +0300
AMBARI-23943. Hiveserver2 fails to start on viewFS enabled cluster (#1393)
AMBARI-23943. Hiveserver2 fails to start on viewFS enabled cluster: {hive_server2_zookeeper_namespace} is not ready yet (aonishuk)
---
.../ambari_agent/dummy_files/alert_definitions.json | 4 ++--
.../AMBARI_METRICS/0.1.0/package/scripts/params.py | 4 +++-
.../HAWQ/2.0.0/package/scripts/params.py | 4 +++-
.../KAFKA/0.8.1/package/scripts/params.py | 4 +++-
.../KNOX/0.5.0.2.2/package/scripts/params_linux.py | 4 +++-
.../PXF/3.0.0/package/scripts/params.py | 4 +++-
.../RANGER_KMS/0.5.0.2.3/package/scripts/params.py | 4 +++-
.../0.60.0.2.2/package/scripts/params_linux.py | 5 ++++-
.../STORM/0.9.1/package/scripts/params_linux.py | 5 ++++-
.../ZEPPELIN/0.6.0/package/scripts/params.py | 4 +++-
.../ZEPPELIN/0.7.0/package/scripts/params.py | 4 +++-
.../python/common-services/HAWQ/test_hawqmaster.py | 6 ++++--
.../2.0.6/AMBARI_METRICS/test_metrics_collector.py | 1 +
.../python/stacks/2.5/RANGER_KMS/test_kms_server.py | 18 ++++++++++++------
.../python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py | 5 +++++
.../python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py | 20 +++++++++++++++++---
16 files changed, 73 insertions(+), 23 deletions(-)
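The substantive change repeats in every service's params.py below: read dfs_type from the command parameters and forward it to the HdfsResource partial, so that agents on viewFS-enabled clusters (where dfs_type carries a non-empty value such as "HCFS") handle paths through the right filesystem client. A minimal sketch of the pattern, assembled from the context lines in the diffs below (config, get_not_managed_resources and the Kerberos arguments are defined earlier in each params.py; the import paths are the resource_management modules these scripts rely on):

    import functools
    from resource_management.libraries.functions.default import default
    from resource_management.libraries.resources.hdfs_resource import HdfsResource

    # "" when unset; a viewFS-enabled cluster passes a non-empty value
    # (e.g. "HCFS") through /commandParams/dfs_type.
    dfs_type = default("/commandParams/dfs_type", "")

    hdfs_site = config['configurations']['hdfs-site']
    default_fs = config['configurations']['core-site']['fs.defaultFS']

    # create partial functions with common arguments for every HdfsResource call
    HdfsResource = functools.partial(
      HdfsResource,
      hdfs_site = hdfs_site,
      default_fs = default_fs,
      immutable_paths = get_not_managed_resources(),
      dfs_type = dfs_type
    )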
diff --git a/ambari-agent/src/test/python/ambari_agent/dummy_files/alert_definitions.json b/ambari-agent/src/test/python/ambari_agent/dummy_files/alert_definitions.json
index 341017c..d9a82a7 100644
--- a/ambari-agent/src/test/python/ambari_agent/dummy_files/alert_definitions.json
+++ b/ambari-agent/src/test/python/ambari_agent/dummy_files/alert_definitions.json
@@ -7,9 +7,9 @@
{
"name": "namenode_process",
"service": "HDFS",
- "component": "NAMENODE",
- "interval": 6,
"enabled": true,
+ "interval": 6,
+ "component": "NAMENODE",
"label": "NameNode process",
"source": {
"reporting": {
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 9424752..08fa675 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -397,6 +397,7 @@ clusterHostInfoDict = config["clusterHostInfo"]
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
+dfs_type = default("/commandParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
@@ -413,7 +414,8 @@ HdfsResource = functools.partial(
principal_name = hdfs_principal_name,
hdfs_site = hdfs_site,
default_fs = default_fs,
- immutable_paths = get_not_managed_resources()
+ immutable_paths = get_not_managed_resources(),
+ dfs_type = dfs_type
)
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
index 65d8ada..fb33e68 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
@@ -74,6 +74,7 @@ if dfs_nameservice is None:
dfs_nameservice = default('/configurations/hdfs-site/dfs.nameservices', None)
hawq_global_rm_type = default('/configurations/hawq-site/hawq_global_rm_type', None)
+dfs_type = default("/commandParams/dfs_type", "")
# HDFSResource partial function
HdfsResource = functools.partial(HdfsResource,
@@ -86,7 +87,8 @@ HdfsResource = functools.partial(HdfsResource,
hadoop_conf_dir = hadoop_conf_dir,
hdfs_site=hdfs_site,
default_fs=default_fs,
- immutable_paths = get_not_managed_resources())
+ immutable_paths = get_not_managed_resources(),
+ dfs_type = dfs_type)
# File partial function
File = functools.partial(File,
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
index 4f79d24..f07cba8 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
@@ -335,6 +335,7 @@ default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_nameno
hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+dfs_type = default("/commandParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
@@ -351,5 +352,6 @@ HdfsResource = functools.partial(
principal_name = hdfs_principal_name,
hdfs_site = hdfs_site,
default_fs = default_fs,
- immutable_paths = get_not_managed_resources()
+ immutable_paths = get_not_managed_resources(),
+ dfs_type = dfs_type
)
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index aafee1f..21ff2c6 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -513,6 +513,7 @@ hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
+dfs_type = default("/commandParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
@@ -529,7 +530,8 @@ HdfsResource = functools.partial(
principal_name = hdfs_principal_name,
hdfs_site = hdfs_site,
default_fs = default_fs,
- immutable_paths = get_not_managed_resources()
+ immutable_paths = get_not_managed_resources(),
+ dfs_type = dfs_type
)
druid_coordinator_urls = ""
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
index a0bf339..e451aa1 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
@@ -78,6 +78,7 @@ hdfs_user_keytab = default('configurations/hadoop-env/hdfs_user_keytab', None)
hdfs_principal_name = default('configurations/hadoop-env/hdfs_principal_name', None)
hbase_user_keytab = default('configurations/hbase-env/hbase_user_keytab', None)
hbase_principal_name = default('configurations/hbase-env/hbase_principal_name', None)
+dfs_type = default("/commandParams/dfs_type", "")
# HDFSResource partial function
HdfsResource = functools.partial(HdfsResource,
@@ -88,5 +89,6 @@ HdfsResource = functools.partial(HdfsResource,
principal_name=hdfs_principal_name,
hdfs_site=hdfs_site,
default_fs=default_fs,
- immutable_paths = get_not_managed_resources())
+ immutable_paths = get_not_managed_resources(),
+ dfs_type = dfs_type)
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
index 5ccfb0d..eac71dd 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
@@ -314,6 +314,7 @@ default_fs = default("/configurations/core-site/fs.defaultFS", None)
hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+dfs_type = default("/commandParams/dfs_type", "")
import functools
# create partial functions with common arguments for every HdfsResource call
@@ -328,7 +329,8 @@ HdfsResource = functools.partial(
hadoop_conf_dir = hadoop_conf_dir,
principal_name = hdfs_principal_name,
hdfs_site = hdfs_site,
- default_fs = default_fs
+ default_fs = default_fs,
+ dfs_type = dfs_type
)
local_component_list = default("/localComponents", [])
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
index 4566111..49839bb 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
@@ -61,6 +61,8 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
+dfs_type = default("/commandParams/dfs_type", "")
+
import functools
#create partial functions with common arguments for every HdfsResource call
#to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
@@ -76,5 +78,6 @@ HdfsResource = functools.partial(
principal_name = hdfs_principal_name,
hdfs_site = hdfs_site,
default_fs = default_fs,
- immutable_paths = get_not_managed_resources()
+ immutable_paths = get_not_managed_resources(),
+ dfs_type = dfs_type
)
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
index 9b7f27af..260e74d 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
@@ -409,6 +409,8 @@ hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+dfs_type = default("/commandParams/dfs_type", "")
+
import functools
#create partial functions with common arguments for every HdfsResource call
#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
@@ -424,5 +426,6 @@ HdfsResource = functools.partial(
principal_name = hdfs_principal_name,
hdfs_site = hdfs_site,
default_fs = default_fs,
- immutable_paths = get_not_managed_resources()
+ immutable_paths = get_not_managed_resources(),
+ dfs_type = dfs_type
)
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py
index 23dd4dd..ef1ba32 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py
@@ -239,6 +239,7 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
+dfs_type = default("/commandParams/dfs_type", "")
# create partial functions with common arguments for every HdfsResource call
# to create hdfs directory we need to call params.HdfsResource in code
@@ -253,5 +254,6 @@ HdfsResource = functools.partial(
hadoop_conf_dir=hadoop_conf_dir,
principal_name=hdfs_principal_name,
hdfs_site=hdfs_site,
- default_fs=default_fs
+ default_fs=default_fs,
+ dfs_type=dfs_type,
)
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
index a8c4bdf..a960c09 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
@@ -253,6 +253,7 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
+dfs_type = default("/commandParams/dfs_type", "")
# create partial functions with common arguments for every HdfsResource call
# to create hdfs directory we need to call params.HdfsResource in code
@@ -267,5 +268,6 @@ HdfsResource = functools.partial(
hadoop_conf_dir=hadoop_conf_dir,
principal_name=hdfs_principal_name,
hdfs_site=hdfs_site,
- default_fs=default_fs
+ default_fs=default_fs,
+ dfs_type=dfs_type,
)
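The test updates that follow are mechanical: each assertResourceCalled('HdfsResource', ...) expectation gains the new keyword, with dfs_type = '' because none of these fixtures describe a viewFS cluster. A representative expectation (illustrative shape only, not a verbatim line from the diffs):

    self.assertResourceCalled('HdfsResource', '/apps/zeppelin',
        user = 'hdfs',
        type = 'directory',
        action = ['create_on_execute'],
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        dfs_type = '',
    )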
diff --git a/ambari-server/src/test/python/common-services/HAWQ/test_hawqmaster.py b/ambari-server/src/test/python/common-services/HAWQ/test_hawqmaster.py
index 3b09309..6595348 100644
--- a/ambari-server/src/test/python/common-services/HAWQ/test_hawqmaster.py
+++ b/ambari-server/src/test/python/common-services/HAWQ/test_hawqmaster.py
@@ -91,7 +91,8 @@ class TestHawqMaster(HawqBaseTestCase):
kinit_path_local = '/usr/bin/kinit',
recursive_chown = True,
keytab = UnknownConfigurationMock(),
- principal_name = UnknownConfigurationMock()
+ principal_name = UnknownConfigurationMock(),
+ dfs_type = '',
)
self.assertResourceCalled('HdfsResource', None,
@@ -105,7 +106,8 @@ class TestHawqMaster(HawqBaseTestCase):
security_enabled = False,
kinit_path_local = '/usr/bin/kinit',
keytab = UnknownConfigurationMock(),
- principal_name = UnknownConfigurationMock()
+ principal_name = UnknownConfigurationMock(),
+ dfs_type = '',
)
self.assertResourceCalled('Execute', self.SOURCE_HAWQ_SCRIPT + 'hawq init master -a -v --ignore-bad-hosts',
diff --git a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
index fc4d79d..4eacefe 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
@@ -364,6 +364,7 @@ class TestMetricsCollector(RMFTestCase):
hdfs_site=self.getConfig()['configurations']['hdfs-site'],
principal_name=UnknownConfigurationMock(),
default_fs='hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
)
self.assertResourceCalled('File', '/var/run/ambari-metrics-collector//distributed_mode', action=["create"],
mode=0644, owner='ams')
diff --git a/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py b/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
index 678a5ad..53d44c1 100644
--- a/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
+++ b/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
@@ -175,7 +175,8 @@ class TestRangerKMS(RMFTestCase):
hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
principal_name = None,
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- default_fs = 'hdfs://c6401.ambari.apache.org:8020'
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
)
self.assertResourceCalled('HdfsResource', '/ranger/audit/kms',
@@ -193,7 +194,8 @@ class TestRangerKMS(RMFTestCase):
hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
principal_name = None,
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- default_fs = 'hdfs://c6401.ambari.apache.org:8020'
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
)
self.assertResourceCalled('HdfsResource', None,
@@ -206,7 +208,8 @@ class TestRangerKMS(RMFTestCase):
hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
principal_name = None,
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- default_fs = 'hdfs://c6401.ambari.apache.org:8020'
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
)
self.assertResourceCalled('File', '/usr/hdp/current/ranger-kms/conf/hdfs-site.xml',
@@ -616,7 +619,8 @@ class TestRangerKMS(RMFTestCase):
hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
principal_name = 'hdfs-cl1@EXAMPLE.COM',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- default_fs = 'hdfs://c6401.ambari.apache.org:8020'
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
)
self.assertResourceCalled('HdfsResource', '/ranger/audit/kms',
@@ -634,7 +638,8 @@ class TestRangerKMS(RMFTestCase):
hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
principal_name = 'hdfs-cl1@EXAMPLE.COM',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- default_fs = 'hdfs://c6401.ambari.apache.org:8020'
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
)
self.assertResourceCalled('HdfsResource', None,
@@ -647,7 +652,8 @@ class TestRangerKMS(RMFTestCase):
hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
principal_name = 'hdfs-cl1@EXAMPLE.COM',
hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- default_fs = 'hdfs://c6401.ambari.apache.org:8020'
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
)
self.assertResourceCalled('File', '/usr/hdp/current/ranger-kms/conf/hdfs-site.xml',
diff --git a/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py b/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py
index 7d4217e..9dfc2ab 100644
--- a/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py
+++ b/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py
@@ -252,6 +252,7 @@ class TestZeppelin060(RMFTestCase):
type = 'directory',
action = ['create_on_execute'],
recursive_chmod = True,
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource', '/user/zeppelin/test',
security_enabled = True,
@@ -269,6 +270,7 @@ class TestZeppelin060(RMFTestCase):
type = 'directory',
action = ['create_on_execute'],
recursive_chmod = True,
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource', '/apps/zeppelin',
security_enabled = True,
@@ -286,6 +288,7 @@ class TestZeppelin060(RMFTestCase):
type = 'directory',
action = ['create_on_execute'],
recursive_chmod = True,
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource', '/apps/zeppelin/tmp',
security_enabled = True,
@@ -305,6 +308,7 @@ class TestZeppelin060(RMFTestCase):
type = 'file',
action = ['create_on_execute'],
mode = 0444,
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource', None,
security_enabled = True,
@@ -318,6 +322,7 @@ class TestZeppelin060(RMFTestCase):
user = 'hdfs',
action = ['execute'],
hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
+ dfs_type='',
)
self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
content=interpreter_json_generated.template_after_base,
diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
index b3c8c58..ae446bd 100644
--- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -236,7 +236,8 @@ class TestZeppelin070(RMFTestCase):
hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
type='directory',
action=['create_on_execute'],
- recursive_chmod=True
+ recursive_chmod=True,
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource', '/user/zeppelin/notebook',
hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
@@ -254,7 +255,8 @@ class TestZeppelin070(RMFTestCase):
hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
type='directory',
action=['create_on_execute'],
- recursive_chmod=True
+ recursive_chmod=True,
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource', '/user/zeppelin',
@@ -272,7 +274,8 @@ class TestZeppelin070(RMFTestCase):
hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
type='directory',
action=['create_on_execute'],
- recursive_chmod=True
+ recursive_chmod=True,
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource', '/user/zeppelin/test',
hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
@@ -290,6 +293,7 @@ class TestZeppelin070(RMFTestCase):
keytab=UnknownConfigurationMock(),
principal_name=UnknownConfigurationMock(),
security_enabled=False,
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource', '/apps/zeppelin',
hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
@@ -307,6 +311,7 @@ class TestZeppelin070(RMFTestCase):
keytab=UnknownConfigurationMock(),
principal_name=UnknownConfigurationMock(),
security_enabled=False,
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource', '/apps/zeppelin/tmp',
hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
@@ -326,6 +331,7 @@ class TestZeppelin070(RMFTestCase):
keytab=UnknownConfigurationMock(),
principal_name=UnknownConfigurationMock(),
security_enabled=False,
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource', None,
hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
@@ -339,6 +345,7 @@ class TestZeppelin070(RMFTestCase):
keytab=UnknownConfigurationMock(),
principal_name=UnknownConfigurationMock(),
security_enabled=False,
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource',
@@ -358,6 +365,7 @@ class TestZeppelin070(RMFTestCase):
hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
type='directory',
action=['create_on_execute'],
+ dfs_type='',
)
self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
@@ -381,6 +389,7 @@ class TestZeppelin070(RMFTestCase):
hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
type='file',
action=['delete_on_execute'],
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource',
@@ -402,6 +411,7 @@ class TestZeppelin070(RMFTestCase):
hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
type='file',
action=['create_on_execute'],
+ dfs_type='',
)
self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
@@ -425,6 +435,7 @@ class TestZeppelin070(RMFTestCase):
hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
type='file',
action=['delete_on_execute'],
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource',
@@ -446,6 +457,7 @@ class TestZeppelin070(RMFTestCase):
hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
type='file',
action=['create_on_execute'],
+ dfs_type='',
)
self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
@@ -469,6 +481,7 @@ class TestZeppelin070(RMFTestCase):
hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
type='file',
action=['delete_on_execute'],
+ dfs_type='',
)
self.assertResourceCalled('HdfsResource', 'hdfs:///user/zeppelin/conf/interpreter.json',
@@ -489,6 +502,7 @@ class TestZeppelin070(RMFTestCase):
hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
type = 'file',
action = ['create_on_execute'],
+ dfs_type='',
)
self.assertResourceCalled('Execute',
--
To stop receiving notification emails like this one, please contact
aonishuk@apache.org.