Posted to commits@streampark.apache.org by lv...@apache.org on 2022/09/26 16:12:46 UTC

[incubator-streampark] branch dev updated: [Improve] Replace hardcode in code with constants (#1698)

This is an automated email from the ASF dual-hosted git repository.

lvshaokang pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-streampark.git


The following commit(s) were added to refs/heads/dev by this push:
     new 7a56c09d1 [Improve] Replace hardcode in code with constants (#1698)
7a56c09d1 is described below

commit 7a56c09d1c722db884a3de5623c2a8752ff4a3db
Author: 1996fanrui <19...@gmail.com>
AuthorDate: Tue Sep 27 00:12:37 2022 +0800

    [Improve] Replace hardcode in code with constants (#1698)
---
 .../scala/org/apache/streampark/common/util/HadoopUtils.scala     | 7 ++++---
 .../java/org/apache/streampark/console/base/util/WebUtils.java    | 3 ++-
 .../org/apache/streampark/console/core/runner/EnvInitializer.java | 7 ++++---
 .../scala/org/apache/streampark/flink/proxy/FlinkShimsProxy.scala | 8 +++++---
 .../apache/streampark/flink/submit/trait/FlinkSubmitTrait.scala   | 6 +++---
 5 files changed, 18 insertions(+), 13 deletions(-)
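
The change consistently swaps the string literal "app.home" for ConfigConst.KEY_APP_HOME. The constant's declaration itself is not part of this diff; a minimal sketch of what it presumably looks like in ConfigConst, with the value inferred from the literals removed below:

    // Sketch only: the real declaration lives in streampark-common's
    // ConfigConst and is not shown in this diff; the value "app.home" is
    // inferred from the hardcoded strings being replaced.
    object ConfigConst {
      val KEY_APP_HOME: String = "app.home"
    }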

diff --git a/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopUtils.scala b/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopUtils.scala
index 3b2f0067b..cc24fa5c3 100644
--- a/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopUtils.scala
+++ b/streampark-common/src/main/scala/org/apache/streampark/common/util/HadoopUtils.scala
@@ -18,7 +18,7 @@
 package org.apache.streampark.common.util
 
 import org.apache.streampark.common.conf.ConfigConst._
-import org.apache.streampark.common.conf.{CommonConfig, InternalConfigHolder}
+import org.apache.streampark.common.conf.{CommonConfig, ConfigConst, InternalConfigHolder}
 import org.apache.commons.collections.CollectionUtils
 import org.apache.commons.lang3.StringUtils
 import org.apache.hadoop.conf.Configuration
@@ -29,13 +29,14 @@ import org.apache.hadoop.service.Service.STATE
 import org.apache.hadoop.yarn.api.records.ApplicationId
 import org.apache.hadoop.yarn.client.api.YarnClient
 import org.apache.hadoop.yarn.conf.YarnConfiguration
-
 import java.io.{File, IOException}
 import java.security.PrivilegedAction
 import java.util
 import java.util.concurrent._
 import java.util.{Timer, TimerTask}
+
 import javax.security.auth.kerberos.KerberosTicket
+
 import scala.collection.JavaConversions._
 import scala.util.{Failure, Success, Try}
 
@@ -62,7 +63,7 @@ object HadoopUtils extends Logger {
 
   private[this] lazy val configurationCache: util.Map[String, Configuration] = new ConcurrentHashMap[String, Configuration]()
 
-  private[this] lazy val kerberosConf: Map[String, String] = SystemPropertyUtils.get("app.home", null) match {
+  private[this] lazy val kerberosConf: Map[String, String] = SystemPropertyUtils.get(ConfigConst.KEY_APP_HOME, null) match {
     case null =>
       getClass.getResourceAsStream("/kerberos.yml") match {
         case x if x != null => PropertiesUtils.fromYamlFile(x)
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/WebUtils.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/WebUtils.java
index 5ee58b6c0..7cf5d00bf 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/WebUtils.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/WebUtils.java
@@ -17,6 +17,7 @@
 
 package org.apache.streampark.console.base.util;
 
+import org.apache.streampark.common.conf.ConfigConst;
 import org.apache.streampark.console.base.domain.Constant;
 
 import com.baomidou.mybatisplus.core.toolkit.StringPool;
@@ -94,7 +95,7 @@ public final class WebUtils {
     }
 
     public static String getAppHome() {
-        return System.getProperty("app.home");
+        return System.getProperty(ConfigConst.KEY_APP_HOME());
     }
 
     public static File getAppDir(String dir) {
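
A note on the Java call site above: KEY_APP_HOME is a Scala val on the ConfigConst object, so Java code reaches it through the compiler-generated accessor, hence the parentheses in ConfigConst.KEY_APP_HOME(), while Scala code references the val directly. A minimal Scala sketch of the access pattern, assuming the declaration sketched earlier:

    import org.apache.streampark.common.conf.ConfigConst
    import org.apache.streampark.common.util.SystemPropertyUtils

    // Scala call sites reference the val directly:
    val appHome: String = SystemPropertyUtils.get(ConfigConst.KEY_APP_HOME, null)

    // Java call sites (e.g. WebUtils above) go through the generated accessor,
    // hence the parentheses: System.getProperty(ConfigConst.KEY_APP_HOME())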
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java
index dab0f5d2a..cf3eef7a0 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java
@@ -73,10 +73,11 @@ public class EnvInitializer implements ApplicationRunner {
     public void run(ApplicationArguments args) throws Exception {
         String appHome = WebUtils.getAppHome();
         if (appHome == null) {
-            throw new ExceptionInInitializerError("[StreamPark] System initialization check failed," +
+            throw new ExceptionInInitializerError(String.format("[StreamPark] System initialization check failed," +
                 " The system initialization check failed. If started local for development and debugging," +
-                " please ensure the -Dapp.home parameter is clearly specified," +
-                " more detail: https://streampark.apache.org/docs/user-guide/development");
+                " please ensure the -D%s parameter is clearly specified," +
+                " more detail: https://streampark.apache.org/docs/user-guide/development",
+                ConfigConst.KEY_APP_HOME()));
         }
 
         // init InternalConfig
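
The EnvInitializer change also keeps the startup error message in sync with the property name: instead of repeating the literal -Dapp.home in the text, the hint is built with String.format around the same constant, so a future rename of the key cannot drift apart from the message shown to users. A minimal Scala sketch of the same fail-fast idea (the production code above is Java; this is illustrative only):

    import org.apache.streampark.common.conf.ConfigConst
    import org.apache.streampark.common.util.SystemPropertyUtils

    // Fail fast when the app home property is missing; embedding the constant
    // in the message keeps the -D hint in sync with the actual key.
    val appHome: String = SystemPropertyUtils.get(ConfigConst.KEY_APP_HOME, null)
    require(appHome != null,
      s"[StreamPark] system initialization check failed: please specify -D${ConfigConst.KEY_APP_HOME}")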
diff --git a/streampark-flink/streampark-flink-proxy/src/main/scala/org/apache/streampark/flink/proxy/FlinkShimsProxy.scala b/streampark-flink/streampark-flink-proxy/src/main/scala/org/apache/streampark/flink/proxy/FlinkShimsProxy.scala
index 7419a79f5..46b8ce8e8 100644
--- a/streampark-flink/streampark-flink-proxy/src/main/scala/org/apache/streampark/flink/proxy/FlinkShimsProxy.scala
+++ b/streampark-flink/streampark-flink-proxy/src/main/scala/org/apache/streampark/flink/proxy/FlinkShimsProxy.scala
@@ -19,11 +19,13 @@ package org.apache.streampark.flink.proxy
 
 import org.apache.streampark.common.domain.FlinkVersion
 import org.apache.streampark.common.util.{ClassLoaderUtils, Logger, Utils}
-
 import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File, ObjectOutputStream}
 import java.net.URL
 import java.util.function.{Supplier, Function => JavaFunc}
 import java.util.regex.Pattern
+
+import org.apache.streampark.common.conf.ConfigConst
+
 import scala.collection.mutable.{ListBuffer, Map => MutableMap}
 
 object FlinkShimsProxy extends Logger {
@@ -90,8 +92,8 @@ object FlinkShimsProxy extends Logger {
       val shimsUrls = ListBuffer[URL](libURL: _*)
 
       // 2) shims jar
-      val appHome = System.getProperty("app.home")
-      require(appHome != null, "app.home is not found on System env.")
+      val appHome = System.getProperty(ConfigConst.KEY_APP_HOME)
+      require(appHome != null, String.format("%s is not found on System env.", ConfigConst.KEY_APP_HOME))
 
       val libPath = new File(s"$appHome/lib")
       require(libPath.exists())
diff --git a/streampark-flink/streampark-flink-submit/streampark-flink-submit-core/src/main/scala/org/apache/streampark/flink/submit/trait/FlinkSubmitTrait.scala b/streampark-flink/streampark-flink-submit/streampark-flink-submit-core/src/main/scala/org/apache/streampark/flink/submit/trait/FlinkSubmitTrait.scala
index 3a9409268..bcc26e5f6 100644
--- a/streampark-flink/streampark-flink-submit/streampark-flink-submit-core/src/main/scala/org/apache/streampark/flink/submit/trait/FlinkSubmitTrait.scala
+++ b/streampark-flink/streampark-flink-submit/streampark-flink-submit-core/src/main/scala/org/apache/streampark/flink/submit/trait/FlinkSubmitTrait.scala
@@ -31,16 +31,16 @@ import org.apache.flink.runtime.jobgraph.{JobGraph, SavepointConfigOptions}
 import org.apache.flink.util.FlinkException
 import org.apache.flink.util.Preconditions.checkNotNull
 import org.apache.streampark.common.conf.ConfigConst._
-import org.apache.streampark.common.conf.Workspace
+import org.apache.streampark.common.conf.{ConfigConst, Workspace}
 import org.apache.streampark.common.enums.{ApplicationType, DevelopmentMode, ExecutionMode, ResolveOrder}
 import org.apache.streampark.common.util.{Logger, SystemPropertyUtils, Utils}
 import org.apache.streampark.flink.core.conf.FlinkRunOption
 import org.apache.streampark.flink.core.{ClusterClient => ClusterClientWrapper}
 import org.apache.streampark.flink.submit.bean._
-
 import java.io.File
 import java.util.concurrent.TimeUnit
 import java.util.{Collections, List => JavaList, Map => JavaMap}
+
 import scala.collection.JavaConversions._
 import scala.collection.JavaConverters._
 import scala.collection.mutable
@@ -187,7 +187,7 @@ trait FlinkSubmitTrait extends Logger {
   //----------Public Method end ------------------
 
   private[submit] lazy val jvmProfilerJar: String = {
-    val pluginsPath = SystemPropertyUtils.get("app.home").concat("/plugins")
+    val pluginsPath = SystemPropertyUtils.get(ConfigConst.KEY_APP_HOME).concat("/plugins")
     val pluginsDir = new File(pluginsPath)
     pluginsDir.list().filter(_.matches("streampark-jvm-profiler-.*\\.jar")) match {
       case Array() => throw new IllegalArgumentException(s"[StreamPark] can no found streampark-jvm-profiler jar in $pluginsPath")