Posted to commits@ambari.apache.org by wu...@apache.org on 2022/11/05 09:57:18 UTC

[ambari] branch trunk updated: AMBARI-25772: Upgrade Zeppelin for BIGTOP to be compatible with bigtop-select (#3455)

This is an automated email from the ASF dual-hosted git repository.

wuzhiguo pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new b5ecc35764 AMBARI-25772: Upgrade Zeppelin for BIGTOP to be compatible with bigtop-select (#3455)
b5ecc35764 is described below

commit b5ecc35764f19292658e45960a450161f4573293
Author: Yu Hou <52...@qq.com>
AuthorDate: Sat Nov 5 17:57:11 2022 +0800

    AMBARI-25772: Upgrade Zeppelin for BIGTOP to be compatible with bigtop-select (#3455)
---
 .../services/ZEPPELIN/package/scripts/master.py    | 33 ++++++------
 .../services/ZEPPELIN/package/scripts/params.py    | 58 ++++++++++------------
 2 files changed, 40 insertions(+), 51 deletions(-)
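
In short, the commit stops hard-coding stack_root = "/usr/lib" and the
zeppelin_dir/conf_dir parameters, and instead resolves Zeppelin's home and
conf directories through Script.get_stack_root() and the bigtop-select-managed
{stack_root}/current/zeppelin-server link, dropping the obsolete Spark 2
probing along the way. A minimal Python sketch of the resulting path
resolution follows; the /usr/bigtop root and the symlink target shown are
illustrative assumptions, not values taken from this diff.

    # Sketch of the path resolution this commit moves to. The stack root
    # value and the symlink target below are assumptions for illustration.
    import os

    stack_root = "/usr/bigtop"  # stand-in for Script.get_stack_root()
    zeppelin_home = os.path.join(stack_root, "current", "zeppelin-server")
    # bigtop-select maintains the "current" link, e.g. (assumed layout):
    #   /usr/bigtop/current/zeppelin-server -> /usr/bigtop/<version>/usr/lib/zeppelin
    print(zeppelin_home)  # /usr/bigtop/current/zeppelin-server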

diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/ZEPPELIN/package/scripts/master.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/ZEPPELIN/package/scripts/master.py
index 619d4e082f..10c42be092 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/ZEPPELIN/package/scripts/master.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/ZEPPELIN/package/scripts/master.py
@@ -52,9 +52,6 @@ class Master(Script):
     if params.spark_version:
       Execute('echo spark_version:' + str(params.spark_version) + ' detected for spark_home: '
               + params.spark_home + ' >> ' + params.zeppelin_log_file, user=params.zeppelin_user)
-    if params.spark2_version:
-      Execute('echo spark2_version:' + str(params.spark2_version) + ' detected for spark2_home: '
-              + params.spark2_home + ' >> ' + params.zeppelin_log_file, user=params.zeppelin_user)
 
   def create_zeppelin_dir(self, params):
     params.HdfsResource(format("/user/{zeppelin_user}"),
@@ -120,7 +117,7 @@ class Master(Script):
     self.create_zeppelin_log_dir(env)
 
     # create the pid and zeppelin dirs
-    Directory([params.zeppelin_pid_dir, params.zeppelin_dir],
+    Directory([params.zeppelin_pid_dir, params.zeppelin_home],
               owner=params.zeppelin_user,
               group=params.zeppelin_group,
               cd_access="a",
@@ -130,23 +127,23 @@ class Master(Script):
     self.chown_zeppelin_pid_dir(env)
 
     XmlConfig("zeppelin-site.xml",
-              conf_dir=params.conf_dir,
+              conf_dir=params.zeppelin_conf_dir,
               configurations=params.config['configurations']['zeppelin-site'],
               owner=params.zeppelin_user,
               group=params.zeppelin_group
               )
     # write out zeppelin-env.sh
     env_content = InlineTemplate(params.zeppelin_env_content)
-    File(format("{params.conf_dir}/zeppelin-env.sh"), content=env_content,
+    File(format("{params.zeppelin_conf_dir}/zeppelin-env.sh"), content=env_content,
          owner=params.zeppelin_user, group=params.zeppelin_group)
 
     # write out shiro.ini
     shiro_ini_content = InlineTemplate(params.shiro_ini_content)
-    File(format("{params.conf_dir}/shiro.ini"), content=shiro_ini_content,
+    File(format("{params.zeppelin_conf_dir}/shiro.ini"), content=shiro_ini_content,
          owner=params.zeppelin_user, group=params.zeppelin_group)
 
     # write out log4j.properties
-    File(format("{params.conf_dir}/log4j.properties"), content=params.log4j_properties_content,
+    File(format("{params.zeppelin_conf_dir}/log4j.properties"), content=params.log4j_properties_content,
          owner=params.zeppelin_user, group=params.zeppelin_group)
 
     self.create_zeppelin_hdfs_conf_dir(env)
@@ -218,7 +215,7 @@ class Master(Script):
     import params
     self.create_zeppelin_log_dir(env)
     self.chown_zeppelin_pid_dir(env)
-    Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh stop >> ' + params.zeppelin_log_file,
+    Execute(params.zeppelin_home + '/bin/zeppelin-daemon.sh stop >> ' + params.zeppelin_log_file,
             user=params.zeppelin_user)
 
   def start(self, env, upgrade_type=None):
@@ -254,15 +251,15 @@ class Master(Script):
                             )
 
     # if first_setup:
-    if not glob.glob(params.conf_dir + "/interpreter.json") and \
-      not os.path.exists(params.conf_dir + "/interpreter.json"):
+    if not glob.glob(params.zeppelin_conf_dir + "/interpreter.json") and \
+      not os.path.exists(params.zeppelin_conf_dir + "/interpreter.json"):
       self.create_interpreter_json()
 
     if params.zeppelin_interpreter_config_upgrade == True:
       self.reset_interpreter_settings(upgrade_type)
       self.update_zeppelin_interpreter()
 
-    Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh restart >> '
+    Execute(params.zeppelin_home + '/bin/zeppelin-daemon.sh restart >> '
             + params.zeppelin_log_file, user=params.zeppelin_user)
     pidfile = glob.glob(os.path.join(status_params.zeppelin_pid_dir,
                                      'zeppelin-' + params.zeppelin_user + '*.pid'))[0]
@@ -414,7 +411,7 @@ class Master(Script):
           kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
         else:
           kinit_if_needed = ''
-        interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
+        interpreter_config = os.path.join(params.zeppelin_conf_dir, "interpreter.json")
         shell.call(format("rm {interpreter_config};"
             "{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"),
             user=params.zeppelin_user)
@@ -426,7 +423,7 @@ class Master(Script):
     import json
 
     self.copy_interpreter_from_HDFS_to_FS(params)
-    interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
+    interpreter_config = os.path.join(params.zeppelin_conf_dir, "interpreter.json")
     config_content = sudo.read_file(interpreter_config)
     config_data = json.loads(config_content)
     return config_data
@@ -435,7 +432,7 @@ class Master(Script):
     import params
     import json
 
-    interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
+    interpreter_config = os.path.join(params.zeppelin_conf_dir, "interpreter.json")
     File(interpreter_config,
          group=params.zeppelin_group,
          owner=params.zeppelin_user,
@@ -653,7 +650,7 @@ class Master(Script):
 
     if not self.copy_interpreter_from_HDFS_to_FS(params):
       interpreter_json = interpreter_json_template.template
-      File(format("{params.conf_dir}/interpreter.json"),
+      File(format("{params.zeppelin_conf_dir}/interpreter.json"),
            content=interpreter_json,
            owner=params.zeppelin_user,
            group=params.zeppelin_group,
@@ -663,7 +660,7 @@ class Master(Script):
         params.HdfsResource(self.get_zeppelin_conf_FS(params),
                             type="file",
                             action="create_on_execute",
-                            source=format("{params.conf_dir}/interpreter.json"),
+                            source=format("{params.zeppelin_conf_dir}/interpreter.json"),
                             owner=params.zeppelin_user,
                             recursive_chown=True,
                             recursive_chmod=True,
@@ -671,7 +668,7 @@ class Master(Script):
 
   def get_zeppelin_spark_dependencies(self):
     import params
-    return glob.glob(params.zeppelin_dir + '/interpreter/spark/dep/zeppelin-spark-dependencies*.jar')
+    return glob.glob(params.zeppelin_home + '/interpreter/spark/dep/zeppelin-spark-dependencies*.jar')
 
 if __name__ == "__main__":
   Master().execute()
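
The master.py changes above are essentially a rename: every consumer of
params.zeppelin_dir and params.conf_dir now reads params.zeppelin_home and
params.zeppelin_conf_dir. A plain-Python sketch of the daemon-control pattern
those Execute(...) resources implement is below; it runs outside the Ambari
agent, so the paths, user name, and the su invocation are illustrative
assumptions rather than the agent's actual Execute machinery.

    # Stand-alone approximation of the Execute(...) calls in start()/stop();
    # all concrete values here are assumptions for illustration.
    import subprocess

    zeppelin_home = "/usr/bigtop/current/zeppelin-server"
    zeppelin_log_file = "/var/log/zeppelin/zeppelin-setup.log"
    zeppelin_user = "zeppelin"

    def zeppelin_daemon(action):
        # Mirrors master.py: run the packaged control script as the
        # zeppelin user and append its output to the setup log.
        cmd = "{0}/bin/zeppelin-daemon.sh {1} >> {2}".format(
            zeppelin_home, action, zeppelin_log_file)
        subprocess.call(["su", "-l", zeppelin_user, "-c", cmd])

    # zeppelin_daemon("restart")  # as in start(); stop() passes "stop"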
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/ZEPPELIN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/ZEPPELIN/package/scripts/params.py
index 051e3e73aa..89095308d2 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/ZEPPELIN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/ZEPPELIN/package/scripts/params.py
@@ -50,16 +50,20 @@ def extract_spark_version(spark_home):
 
 # server configurations
 config = Script.get_config()
-# stack_root = Script.get_stack_root()
-stack_root = "/usr/lib"
+stack_root = Script.get_stack_root()
+# e.g. 2.3
+stack_version_unformatted = config['clusterLevelParams']['stack_version']
+# e.g. 2.3.0.0
+stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
+# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
+# e.g. 2.3.0.0-2130
+version = default("/commandParams/version", None)
+stack_name = default("/clusterLevelParams/stack_name", None)
 
 # e.g. /var/lib/ambari-agent/cache/stacks/HDP/2.2/services/zeppelin-stack/package
 service_packagedir = os.path.realpath(__file__).split('/scripts')[0]
 
-zeppelin_dirname = 'zeppelin-server'
-
-install_dir = os.path.join(stack_root, "zeppelin")
-
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
 ui_ssl_enabled = config['configurations']['zeppelin-site']['zeppelin.ssl']
@@ -68,20 +72,6 @@ is_ui_ssl_enabled = str(ui_ssl_enabled).upper() == 'TRUE'
 setup_view = True
 temp_file = config['configurations']['zeppelin-env']['zeppelin.temp.file']
 
-spark_home = config['configurations']['zeppelin-env']['spark_home']
-spark_version = None
-spark2_home = ""
-spark2_version = None
-if 'spark-defaults' in config['configurations']:
-  spark_home = os.path.join(stack_root, "current", 'spark-client')
-  spark_version = extract_spark_version(spark_home)
-if 'spark2-defaults' in config['configurations']:
-  spark2_home = os.path.join(stack_root, "current", 'spark2-client')
-  spark2_version = extract_spark_version(spark2_home)
-
-# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
-version = default("/commandParams/version", None)
-stack_name = default("/clusterLevelParams/stack_name", None)
 
 # params from zeppelin-site
 zeppelin_port = str(config['configurations']['zeppelin-site']['zeppelin.server.port'])
@@ -107,9 +97,21 @@ hbase_conf_dir = config['configurations']['zeppelin-env']['hbase_conf_dir']
 zeppelin_log_file = os.path.join(zeppelin_log_dir, 'zeppelin-setup.log')
 zeppelin_hdfs_user_dir = format("/user/{zeppelin_user}")
 
-zeppelin_dir = install_dir
-conf_dir = "/etc/zeppelin/conf"
-external_dependency_conf = "/etc/zeppelin/conf/external-dependency-conf"
+zeppelin_conf_dir = "/etc/zeppelin/conf"
+external_dependency_conf = format("{zeppelin_conf_dir}/external-dependency-conf")
+zeppelin_home = "/usr/lib/zeppelin"
+
+spark_home = config['configurations']['zeppelin-env']['spark_home']
+
+if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
+  spark_home = format("{stack_root}/current/spark-client")
+  hbase_home = format("{stack_root}/current/hbase-client")
+  zeppelin_home = format("{stack_root}/current/zeppelin-server")
+  local_notebook_dir = format("{stack_root}/{stack_version_formatted}/{local_notebook_dir}")
+
+spark_version = None
+if 'spark-defaults' in config['configurations']:
+  spark_version = extract_spark_version(spark_home)
 
 conf_stored_in_hdfs = False
 if 'zeppelin.config.fs.dir' in config['configurations']['zeppelin-site'] and \
@@ -244,17 +246,7 @@ else:
 
 exclude_interpreter_autoconfig = default("/configurations/zeppelin-site/exclude.interpreter.autoconfig", None)
 
-# e.g. 2.3
-stack_version_unformatted = config['clusterLevelParams']['stack_version']
-
-# e.g. 2.3.0.0
-stack_version_formatted = format_stack_version(stack_version_unformatted)
-major_stack_version = get_major_version(stack_version_formatted)
-
-# e.g. 2.3.0.0-2130
-full_stack_version = default("/commandParams/version", None)
 
-spark_client_version = get_stack_version('spark-client')
 
 hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
 livy_hosts = default("/clusterHostInfo/livy_server_hosts", [])
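
The key addition in params.py is the feature gate: zeppelin_home, spark_home,
and hbase_home keep their package-layout defaults unless the formatted stack
version supports ROLLING_UPGRADE, in which case they switch to the
bigtop-select paths under {stack_root}/current. A standalone sketch of that
gate follows; the check_stack_feature stand-in and the concrete values are
assumptions, since the real helpers live in Ambari's resource_management
library.

    # Sketch of the gate params.py now applies; the stand-in helper and the
    # literal values are assumptions for illustration only.
    def check_stack_feature(feature, version):
        # Stand-in: the real helper consults the stack's feature metadata.
        return version is not None

    stack_root = "/usr/bigtop"          # assumed get_stack_root() value
    stack_version_formatted = "3.2.0"   # assumed formatted stack version

    zeppelin_home = "/usr/lib/zeppelin"  # package-layout default
    if stack_version_formatted and check_stack_feature(
            "rolling_upgrade", stack_version_formatted):
        zeppelin_home = "{0}/current/zeppelin-server".format(stack_root)

    print(zeppelin_home)  # /usr/bigtop/current/zeppelin-server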

