Posted to commits@ambari.apache.org by wu...@apache.org on 2022/11/08 18:15:32 UTC

[ambari] branch trunk updated: AMBARI-25771: Upgrade Spark for BIGTOP to be compatible with bigtop-select (#3454)

This is an automated email from the ASF dual-hosted git repository.

wuzhiguo pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 206ee2cb68 AMBARI-25771: Upgrade Spark for BIGTOP to be compatible with bigtop-select (#3454)
206ee2cb68 is described below

commit 206ee2cb68553aa624196f71f605cc9f5c665bf9
Author: Yu Hou <52...@qq.com>
AuthorDate: Wed Nov 9 02:15:26 2022 +0800

    AMBARI-25771: Upgrade Spark for BIGTOP to be compatible with bigtop-select (#3454)
---
 .../stacks/BIGTOP/3.2.0/properties/stack_packages.json    |  6 ++++--
 .../3.2.0/services/SPARK/configuration/spark-defaults.xml |  2 +-
 .../BIGTOP/3.2.0/services/SPARK/package/scripts/params.py | 15 ++++++++++-----
 .../3.2.0/services/SPARK/package/scripts/setup_spark.py   | 12 ++++++------
 4 files changed, 21 insertions(+), 14 deletions(-)
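Taken together, the four files move the BIGTOP 3.2.0 Spark scripts off hard-coded /usr/lib and hadoop-client paths and onto the distro-select layout: params.py renames spark_conf to spark_conf_dir, redirects spark_home and hive_home to the {stack_root}/current/... links on rolling-upgrade stacks, and derives the native-library path from hadoop_home, while stack_packages.json additionally registers spark-client in the INSTALL lists so bigtop-select manages the client links alongside the server roles. A minimal sketch of the <stack_root>/current symlink layout that a distro-select tool such as bigtop-select maintains (concrete paths below are illustrative, not taken from the patch):

    #   /usr/bigtop/3.2.0/usr/lib/spark     <- versioned install tree
    #   /usr/bigtop/current/spark-client    <- symlink flipped by bigtop-select
    # Stack scripts then only ever reference the "current" link:
    stack_root = '/usr/bigtop'               # assumed BIGTOP stack root
    component_directory = 'spark-client'     # assumed Spark client component dir
    spark_home = '{0}/current/{1}'.format(stack_root, component_directory)
    print(spark_home)                        # /usr/bigtop/current/spark-client
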

diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json
index 5339b079b9..564ee02033 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/properties/stack_packages.json
@@ -247,7 +247,8 @@
         "SPARK_JOBHISTORYSERVER": {
           "STACK-SELECT-PACKAGE": "spark-historyserver",
           "INSTALL": [
-            "spark-historyserver"
+            "spark-historyserver",
+            "spark-client"
           ],
           "PATCH": [
             "spark-historyserver"
@@ -259,7 +260,8 @@
         "SPARK_THRIFTSERVER": {
           "STACK-SELECT-PACKAGE": "spark-thriftserver",
           "INSTALL": [
-            "spark-thriftserver"
+            "spark-thriftserver",
+            "spark-client"
           ],
           "PATCH": [
             "spark-thriftserver"
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/configuration/spark-defaults.xml b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/configuration/spark-defaults.xml
index f50bed1405..0b555e73f4 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/configuration/spark-defaults.xml
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/configuration/spark-defaults.xml
@@ -104,7 +104,7 @@
   </property>
   <property>
     <name>spark.scheduler.allocation.file</name>
-    <value>file:///{{spark_conf}}/spark-thrift-fairscheduler.xml</value>
+    <value>file:///{{spark_conf_dir}}/spark-thrift-fairscheduler.xml</value>
     <description>
       Scheduler configuration file for thriftserver.
     </description>
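The {{spark_conf}} placeholder stopped resolving once params.py renamed the variable, so the config value is updated to {{spark_conf_dir}}. Placeholders in stack configuration values use Jinja-compatible syntax and are substituted from the variables defined in params.py at deploy time; a standalone illustration using the jinja2 library (Ambari performs the equivalent rendering internally):

    from jinja2 import Template

    value = 'file:///{{spark_conf_dir}}/spark-thrift-fairscheduler.xml'
    print(Template(value).render(spark_conf_dir='/etc/spark/conf'))
    # file:////etc/spark/conf/spark-thrift-fairscheduler.xml
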
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/package/scripts/params.py
index 9552c2d1e0..445ec70e47 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/package/scripts/params.py
@@ -74,20 +74,25 @@ sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
 # New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
 version = default("/commandParams/version", None)
 
-spark_conf = '/etc/spark/conf'
+hadoop_home = stack_select.get_hadoop_dir("home")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
 hive_home = '/usr/lib/hive'
-hive_conf_dir = format("{hive_home}/conf")
 yarn_application_classpath = config['configurations']['yarn-site']['yarn.application.classpath']
 
-hadoop_home = stack_select.get_hadoop_dir("home")
+spark_conf_dir = '/etc/spark/conf'
 spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
 spark_pid_dir = status_params.spark_pid_dir
 spark_home='/usr/lib/spark'
 
+if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
+  spark_home = format("{stack_root}/current/{component_directory}")
+  hive_home = format("{stack_root}/current/{hive_component_directory}")
+
+hive_conf_dir = format("{hive_home}/conf")
+
 spark_daemon_memory = config['configurations']['spark-env']['spark_daemon_memory']
-spark_thrift_server_conf_file = spark_conf + "/spark-defaults.conf"
+spark_thrift_server_conf_file = spark_conf_dir + "/spark-defaults.conf"
 java_home = config['ambariLevelParams']['java_home']
 
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
@@ -117,7 +122,7 @@ spark_history_server_stop = format("{spark_home}/sbin/stop-history-server.sh")
 
 spark_thrift_server_start = format("{spark_home}/sbin/start-thriftserver.sh")
 spark_thrift_server_stop = format("{spark_home}/sbin/stop-thriftserver.sh")
-spark_hadoop_lib_native = format("{stack_root}/current/hadoop-client/lib/native:{stack_root}/current/hadoop-client/lib/native/Linux-amd64-64")
+spark_hadoop_lib_native = format("{hadoop_home}/lib/native:{hadoop_home}/lib/native/Linux-amd64-64")
 
 run_example_cmd = format("{spark_home}/bin/run-example")
 spark_smoke_example = "SparkPi"
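The params.py hunks do three things: hadoop_home now comes from stack_select.get_hadoop_dir("home"), spark_home and hive_home are redirected to the {stack_root}/current/... links whenever the stack advertises the ROLLING_UPGRADE feature, and hive_conf_dir is computed after that redirect so it follows the relocated hive_home; the native-library path is likewise derived from hadoop_home instead of a hard-coded hadoop-client path. A condensed sketch of the resulting path logic, with plain-Python stand-ins for the stack_select and check_stack_feature machinery (component directory names are assumed for illustration):

    def resolve_spark_paths(stack_root, rolling_upgrade_supported,
                            component_directory='spark-client',       # assumed
                            hive_component_directory='hive-client'):  # assumed
        spark_home, hive_home = '/usr/lib/spark', '/usr/lib/hive'  # defaults
        if rolling_upgrade_supported:
            spark_home = '{0}/current/{1}'.format(stack_root, component_directory)
            hive_home = '{0}/current/{1}'.format(stack_root,
                                                 hive_component_directory)
        # hive_conf_dir must be derived *after* the redirect, which is why the
        # patch moves this assignment below the rolling-upgrade block:
        hive_conf_dir = hive_home + '/conf'
        return spark_home, hive_home, hive_conf_dir

    print(resolve_spark_paths('/usr/bigtop', True))
    # ('/usr/bigtop/current/spark-client', '/usr/bigtop/current/hive-client',
    #  '/usr/bigtop/current/hive-client/conf')
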
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/package/scripts/setup_spark.py b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/package/scripts/setup_spark.py
index f32d460c76..b7335b3ccf 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/package/scripts/setup_spark.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/package/scripts/setup_spark.py
@@ -101,7 +101,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
   if not params.spark_warehouse_dir:
       spark_defaults.pop("spark.sql.warehouse.dir")
 
-  PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
+  PropertiesFile(format("{spark_conf_dir}/spark-defaults.conf"),
     properties = spark_defaults,
     key_value_delimiter = " ",
     owner=params.spark_user,
@@ -110,7 +110,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
   )
 
   # create spark-env.sh in etc/conf dir
-  File(os.path.join(params.spark_conf, 'spark-env.sh'),
+  File(os.path.join(params.spark_conf_dir, 'spark-env.sh'),
        owner=params.spark_user,
        group=params.spark_group,
        content=InlineTemplate(params.spark_env_sh),
@@ -118,7 +118,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
   )
 
   #create log4j.properties in etc/conf dir
-  File(os.path.join(params.spark_conf, 'log4j.properties'),
+  File(os.path.join(params.spark_conf_dir, 'log4j.properties'),
        owner=params.spark_user,
        group=params.spark_group,
        content=params.spark_log4j_properties,
@@ -126,7 +126,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
   )
 
   #create metrics.properties in etc/conf dir
-  File(os.path.join(params.spark_conf, 'metrics.properties'),
+  File(os.path.join(params.spark_conf_dir, 'metrics.properties'),
        owner=params.spark_user,
        group=params.spark_group,
        content=InlineTemplate(params.spark_metrics_properties),
@@ -135,7 +135,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
 
   if params.is_hive_installed:
     XmlConfig("hive-site.xml",
-          conf_dir=params.spark_conf,
+          conf_dir=params.spark_conf_dir,
           configurations=params.spark_hive_properties,
           owner=params.spark_user,
           group=params.spark_group,
@@ -143,7 +143,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
 
   if params.spark_thrift_fairscheduler_content:
     # create spark-thrift-fairscheduler.xml
-    File(os.path.join(params.spark_conf,"spark-thrift-fairscheduler.xml"),
+    File(os.path.join(params.spark_conf_dir,"spark-thrift-fairscheduler.xml"),
       owner=params.spark_user,
       group=params.spark_group,
       mode=0755,
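The setup_spark.py hunks are a mechanical follow-up to the rename in params.py: every generated file (spark-defaults.conf, spark-env.sh, log4j.properties, metrics.properties, hive-site.xml, spark-thrift-fairscheduler.xml) is now written under params.spark_conf_dir instead of params.spark_conf. A quick self-check one might run after a rename like this, to confirm no reference to the old name survives (illustrative, not part of the patch):

    import pathlib
    import re

    source = pathlib.Path('setup_spark.py').read_text()
    # \b does not match before '_', so spark_conf_dir is not flagged.
    stale = [m.start() for m in re.finditer(r'\bspark_conf\b', source)]
    assert not stale, 'old spark_conf references remain at offsets %s' % stale
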

