Posted to commits@seatunnel.apache.org by ri...@apache.org on 2022/01/16 12:11:07 UTC

[incubator-seatunnel] branch bugfix-1034 created (now bd2fd44)

This is an automated email from the ASF dual-hosted git repository.

rickyhuo pushed a change to branch bugfix-1034
in repository https://gitbox.apache.org/repos/asf/incubator-seatunnel.git.


      at bd2fd44  Fix #1034

This branch includes the following new commits:

     new bd2fd44  Fix #1034

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


[incubator-seatunnel] 01/01: Fix #1034

Posted by ri...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

rickyhuo pushed a commit to branch bugfix-1034
in repository https://gitbox.apache.org/repos/asf/incubator-seatunnel.git

commit bd2fd442f74fa09666e0cc66fe72c57221e19e44
Author: rickyhuo <hu...@163.com>
AuthorDate: Sun Jan 16 20:10:49 2022 +0800

    Fix #1034
---
 bin/start-seatunnel-structured-streaming.sh | 34 ++++++++++-------------------
 1 file changed, 12 insertions(+), 22 deletions(-)
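
In summary, the patch gives variables_substitution a default value of "-Ddefault=seatunnel", passes that value through to ExposeSparkConf as an extra argument, drops the separate driverJavaOpts/executorJavaOpts/clientModeDriverJavaOpts handling (along with the --driver-java-options and spark.*.extraJavaOptions flags they fed), renames sparkconf to sparkConf, and rebuilds the final spark-submit invocation as a bash array that is executed with eval. A short sketch of that last pattern follows the diff.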

diff --git a/bin/start-seatunnel-structured-streaming.sh b/bin/start-seatunnel-structured-streaming.sh
index 5a2965d..937106b 100755
--- a/bin/start-seatunnel-structured-streaming.sh
+++ b/bin/start-seatunnel-structured-streaming.sh
@@ -21,6 +21,8 @@
 CMD_ARGUMENTS=$@
 
 PARAMS=""
+variables_substitution="-Ddefault=seatunnel"
+
 while (( "$#" )); do
   case "$1" in
     -m|--master)
@@ -50,6 +52,7 @@ while (( "$#" )); do
       shift 2
       ;;
 
+
     --) # end argument parsing
       shift
       break
@@ -125,9 +128,9 @@ string_trim() {
 
 variables_substitution=$(string_trim "${variables_substitution}")
 
-## get spark conf from config file and specify them in spark-submit
+## get spark conf from config file and specify them in spark-submit --conf
 function get_spark_conf {
-    spark_conf=$(java ${variables_substitution} -cp ${assemblyJarName} io.github.interestinglab.waterdrop.config.ExposeSparkConf ${CONFIG_FILE})
+    spark_conf=$(java ${variables_substitution} -cp ${assemblyJarName} io.github.interestinglab.waterdrop.config.ExposeSparkConf ${CONFIG_FILE} "${variables_substitution}")
     if [ "$?" != "0" ]; then
         echo "[ERROR] config file does not exists or cannot be parsed due to invalid format"
         exit -1
@@ -135,20 +138,9 @@ function get_spark_conf {
     echo ${spark_conf}
 }
 
-sparkconf=$(get_spark_conf)
-
-echo "[INFO] spark conf: ${sparkconf}"
+sparkConf=$(get_spark_conf)
 
-# Spark Driver Options
-driverJavaOpts=""
-executorJavaOpts=""
-clientModeDriverJavaOpts=""
-if [ ! -z "${variables_substitution}" ]; then
-  driverJavaOpts="${variables_substitution}"
-  executorJavaOpts="${variables_substitution}"
-  # in local, client mode, driverJavaOpts can not work, we must use --driver-java-options
-  clientModeDriverJavaOpts="${variables_substitution}"
-fi
+echo "[INFO] spark conf: ${sparkConf}"
 
 
 ## compress plugins.tar.gz in cluster mode
@@ -170,16 +162,14 @@ if [ "${DEPLOY_MODE}" == "cluster" ]; then
   fi
 fi
 
-
-exec ${SPARK_HOME}/bin/spark-submit --class io.github.interestinglab.waterdrop.WaterdropStructuredStreaming \
+CMD=(${SPARK_HOME}/bin/spark-submit --class io.github.interestinglab.waterdrop.WaterdropStructuredStreaming \
     --name $(getAppName ${CONFIG_FILE}) \
     --master ${MASTER} \
     --deploy-mode ${DEPLOY_MODE} \
     --queue "${QUEUE}" \
-    --driver-java-options "${clientModeDriverJavaOpts}" \
-    --conf spark.executor.extraJavaOptions="${executorJavaOpts}" \
-    --conf spark.driver.extraJavaOptions="${driverJavaOpts}" \
-    ${sparkconf} \
+    "${sparkConf}" \
     ${JarDepOpts} \
     ${FilesDepOpts} \
-    ${assemblyJarName} ${CMD_ARGUMENTS}
+    ${assemblyJarName} ${CMD_ARGUMENTS})
+
+eval "${CMD[@]}"
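

The CMD array plus eval at the end of the new script replaces the previous direct exec of spark-submit. The following is a minimal, self-contained sketch of that pattern, not part of the commit: the variable contents are illustrative assumptions (in the real script the string comes from get_spark_conf), and echo stands in for spark-submit.

    #!/usr/bin/env bash
    # Hypothetical stand-in for the output of get_spark_conf: one string that
    # carries several --conf key=value pairs.
    sparkConf='--conf spark.app.name=demo --conf spark.executor.memory=1g'

    # Build the command as an array. Quoted, ${sparkConf} is a single array element.
    CMD=(echo --master yarn "${sparkConf}" example.jar)

    # eval joins the array elements into one command line and re-parses it, so the
    # embedded --conf pairs are split into separate arguments before the command runs.
    eval "${CMD[@]}"
    # Prints: --master yarn --conf spark.app.name=demo --conf spark.executor.memory=1g example.jar

Running "${CMD[@]}" directly, without eval, would hand the whole quoted ${sparkConf} string to the command as a single argument; eval is what re-splits it into the individual --conf flags before spark-submit sees them.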