You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by vs...@apache.org on 2017/10/09 06:34:44 UTC
ambari git commit: AMBARI-22153. On Zeppelin restart sometimes
interpreter settings get reset (Prabhjyot Singh via Venkata Sairam)
Repository: ambari
Updated Branches:
refs/heads/trunk e61556cc2 -> 84e616da7
AMBARI-22153. On Zeppelin restart sometimes interpreter settings get reset (Prabhjyot Singh via Venkata Sairam)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/84e616da
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/84e616da
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/84e616da
Branch: refs/heads/trunk
Commit: 84e616da753224c43d62ddaeb8f1ef935c62d876
Parents: e61556c
Author: Venkata Sairam <ve...@gmail.com>
Authored: Mon Oct 9 12:04:32 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Mon Oct 9 12:04:32 2017 +0530
----------------------------------------------------------------------
.../ZEPPELIN/0.7.0/package/scripts/master.py | 33 +++---
.../stacks/2.6/ZEPPELIN/test_zeppelin_070.py | 101 +++++--------------
2 files changed, 45 insertions(+), 89 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/84e616da/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index a450fb6..d615d06 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -192,18 +192,8 @@ class Master(Script):
notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \
params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir']
- kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
- kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
-
- notebook_directory_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -e {notebook_directory};echo $?"),
- user=params.zeppelin_user)[1]
-
- #if there is no kerberos setup then the string will contain "-bash: kinit: command not found"
- if "\n" in notebook_directory_exists:
- notebook_directory_exists = notebook_directory_exists.split("\n")[1]
- # '1' means it does not exist
- if notebook_directory_exists == '1':
+ if self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user):
# hdfs dfs -mkdir {notebook_directory}
params.HdfsResource(format("{notebook_directory}"),
type="directory",
@@ -310,6 +300,22 @@ class Master(Script):
return hdfs_interpreter_config
+ def is_path_exists_in_HDFS(self, path, as_user):
+ kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+ kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
+ path_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -e {path};echo $?"),
+ user=as_user)[1]
+
+ # if there is no kerberos setup then the string will contain "-bash: kinit: command not found"
+ if "\n" in path_exists:
+ path_exists = path_exists.split("\n")[1]
+
+ # exit status '0' means the path exists
+ if path_exists == '0':
+ return True
+ else:
+ return False
+
def get_interpreter_settings(self):
import params
import json
@@ -320,12 +326,14 @@ class Master(Script):
if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']:
zeppelin_conf_fs = self.getZeppelinConfFS(params)
- if os.path.exists(zeppelin_conf_fs):
+
+ if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user):
# copy from hdfs to /etc/zeppelin/conf/interpreter.json
params.HdfsResource(interpreter_config,
type="file",
action="download_on_execute",
source=zeppelin_conf_fs,
+ user=params.zeppelin_user,
group=params.zeppelin_group,
owner=params.zeppelin_user)
else:
@@ -353,6 +361,7 @@ class Master(Script):
type="file",
action="create_on_execute",
source=interpreter_config,
+ user=params.zeppelin_user,
group=params.zeppelin_group,
owner=params.zeppelin_user,
replace_existing_files=True)
http://git-wip-us.apache.org/repos/asf/ambari/blob/84e616da/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
index e8ef262..3064880 100644
--- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -305,67 +305,32 @@ class TestZeppelin070(RMFTestCase):
security_enabled=False,
)
- self.assertResourceCalled('HdfsResource', '/etc/zeppelin/conf/interpreter.json',
- security_enabled = False,
- hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
- keytab = UnknownConfigurationMock(),
- source = '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hdfs_site = {u'a': u'b'},
- kinit_path_local = '/usr/bin/kinit',
- principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
- owner = 'zeppelin',
- group = 'zeppelin',
- hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
- type = 'file',
- action = ['download_on_execute'],
- )
-
self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
- content=interpreter_json_generated.template_after_base,
+ content=interpreter_json_generated.template_after_base,
+ owner='zeppelin',
+ group='zeppelin',
+ )
+
+ self.assertResourceCalled('HdfsResource',
+ '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
+ security_enabled=False,
+ hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
+ keytab=UnknownConfigurationMock(),
+ source='/etc/zeppelin/conf/interpreter.json',
+ default_fs='hdfs://c6401.ambari.apache.org:8020',
+ hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+ hdfs_site={u'a': u'b'},
+ kinit_path_local='/usr/bin/kinit',
+ principal_name=UnknownConfigurationMock(),
+ user='zeppelin',
owner='zeppelin',
group='zeppelin',
+ replace_existing_files=True,
+ hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
+ type='file',
+ action=['create_on_execute'],
)
- self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
- security_enabled = False,
- hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
- keytab = UnknownConfigurationMock(),
- source = '/etc/zeppelin/conf/interpreter.json',
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- replace_existing_files = True,
- hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hdfs_site = {u'a': u'b'},
- kinit_path_local = '/usr/bin/kinit',
- principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
- owner = 'zeppelin',
- group = 'zeppelin',
- hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
- type = 'file',
- action = ['create_on_execute'],
- )
-
- self.assertResourceCalled('HdfsResource', '/etc/zeppelin/conf/interpreter.json',
- security_enabled = False,
- hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
- keytab = UnknownConfigurationMock(),
- source = '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hdfs_site = {u'a': u'b'},
- kinit_path_local = '/usr/bin/kinit',
- principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
- owner = 'zeppelin',
- group = 'zeppelin',
- hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
- type = 'file',
- action = ['download_on_execute'],
- )
-
self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
content=interpreter_json_generated.template_after_without_spark_and_livy,
owner='zeppelin',
@@ -383,7 +348,7 @@ class TestZeppelin070(RMFTestCase):
hdfs_site = {u'a': u'b'},
kinit_path_local = '/usr/bin/kinit',
principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
+ user = 'zeppelin',
owner = 'zeppelin',
group = 'zeppelin',
hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
@@ -391,24 +356,6 @@ class TestZeppelin070(RMFTestCase):
action = ['create_on_execute'],
)
- self.assertResourceCalled('HdfsResource', '/etc/zeppelin/conf/interpreter.json',
- security_enabled = False,
- hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
- keytab = UnknownConfigurationMock(),
- source = '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hdfs_site = {u'a': u'b'},
- kinit_path_local = '/usr/bin/kinit',
- principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
- owner = 'zeppelin',
- group = 'zeppelin',
- hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
- type = 'file',
- action = ['download_on_execute'],
- )
-
self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
content=interpreter_json_generated.template_after_kerberos,
owner='zeppelin',
@@ -421,12 +368,12 @@ class TestZeppelin070(RMFTestCase):
keytab = UnknownConfigurationMock(),
source = '/etc/zeppelin/conf/interpreter.json',
default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- replace_existing_files = True,
hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
hdfs_site = {u'a': u'b'},
kinit_path_local = '/usr/bin/kinit',
principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
+ replace_existing_files = True,
+ user = 'zeppelin',
owner = 'zeppelin',
group = 'zeppelin',
hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',