Posted to commits@kyuubi.apache.org by fe...@apache.org on 2022/07/26 05:00:46 UTC

[incubator-kyuubi] branch master updated: [KYUUBI #3087] Convert the kyuubi batch conf with `spark.` prefix so that spark could identify

This is an automated email from the ASF dual-hosted git repository.

feiwang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new eb96db54d [KYUUBI #3087] Convert the kyuubi batch conf with `spark.` prefix so that spark could identify
eb96db54d is described below

commit eb96db54d419d43f7527f8202f9b862af9c4ff69
Author: Fei Wang <fw...@ebay.com>
AuthorDate: Tue Jul 26 13:00:37 2022 +0800

    [KYUUBI #3087] Convert the kyuubi batch conf with `spark.` prefix so that spark could identify
    
    ### _Why are the changes needed?_
    
    We might need to pass some parameters, such as `kyuubi.client.ipAddress`, but Spark cannot identify keys that lack the `spark.` prefix.
    
    So we need to convert them (see the sketch below).
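    
    For illustration, here is a minimal, self-contained sketch of the intended key conversion. It mirrors the `convertConfigKey` helper added in this PR; the example keys and the `ConvertConfigKeySketch` name are illustrative only:
    
    ```scala
    // Sketch of the key conversion introduced by this PR (mirrors `convertConfigKey`).
    object ConvertConfigKeySketch extends App {
      def convertConfigKey(key: String): String =
        if (key.startsWith("spark.")) key                          // already a Spark conf, keep as is
        else if (key.startsWith("hadoop.")) "spark.hadoop." + key  // forwarded to Hadoop conf via spark.hadoop.*
        else "spark." + key                                        // anything else gets a plain spark. prefix
    
      println(convertConfigKey("spark.executor.memory"))   // spark.executor.memory
      println(convertConfigKey("hadoop.tmp.dir"))           // spark.hadoop.hadoop.tmp.dir
      println(convertConfigKey("kyuubi.client.ipAddress"))  // spark.kyuubi.client.ipAddress
    }
    ```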
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #3087 from turboFei/batch_spark.
    
    Closes #3087
    
    bf614e40 [Fei Wang] comment
    58c651d8 [Fei Wang] refactor
    2c39c51b [Fei Wang] convert the kyuubi conf with spark. prefix, so that we can expose some info
    
    Authored-by: Fei Wang <fw...@ebay.com>
    Signed-off-by: Fei Wang <fw...@ebay.com>
---
 .../engine/spark/SparkBatchProcessBuilder.scala    |  2 +-
 .../kyuubi/engine/spark/SparkProcessBuilder.scala  | 32 ++++++++++++----------
 2 files changed, 18 insertions(+), 16 deletions(-)

diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkBatchProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkBatchProcessBuilder.scala
index 657d9a436..89ce84180 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkBatchProcessBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkBatchProcessBuilder.scala
@@ -51,7 +51,7 @@ class SparkBatchProcessBuilder(
 
     (batchKyuubiConf.getAll ++ sparkAppNameConf()).foreach { case (k, v) =>
       buffer += CONF
-      buffer += s"$k=$v"
+      buffer += s"${convertConfigKey(k)}=$v"
     }
 
     buffer += PROXY_USER
diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
index 544d3effe..4f39ba3a6 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
@@ -54,6 +54,22 @@ class SparkProcessBuilder(
 
   override def mainClass: String = "org.apache.kyuubi.engine.spark.SparkSQLEngine"
 
+  /**
+   * Converts a Kyuubi config key into one that Spark can identify.
+   * - If the key starts with `spark.`, keep it as is since it is already a Spark conf
+   * - If the key starts with `hadoop.`, prefix it with `spark.hadoop.`
+   * - Otherwise, add a `spark.` prefix to the key
+   */
+  protected def convertConfigKey(key: String): String = {
+    if (key.startsWith("spark.")) {
+      key
+    } else if (key.startsWith("hadoop.")) {
+      "spark.hadoop." + key
+    } else {
+      "spark." + key
+    }
+  }
+
   override protected val commands: Array[String] = {
     KyuubiApplicationManager.tagApplication(engineRefId, shortName, clusterManager(), conf)
     val buffer = new ArrayBuffer[String]()
@@ -69,23 +85,9 @@ class SparkProcessBuilder(
       allConf = allConf ++ zkAuthKeytabFileConf(allConf)
     }
 
-    /**
-     * Converts kyuubi configs to configs that Spark could identify.
-     * - If the key is start with `spark.`, keep it AS IS as it is a Spark Conf
-     * - If the key is start with `hadoop.`, it will be prefixed with `spark.hadoop.`
-     * - Otherwise, the key will be added a `spark.` prefix
-     */
     allConf.foreach { case (k, v) =>
-      val newKey =
-        if (k.startsWith("spark.")) {
-          k
-        } else if (k.startsWith("hadoop.")) {
-          "spark.hadoop." + k
-        } else {
-          "spark." + k
-        }
       buffer += CONF
-      buffer += s"$newKey=$v"
+      buffer += s"${convertConfigKey(k)}=$v"
     }
 
     // iff the keytab is specified, PROXY_USER is not supported
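
With this change, each batch conf entry is emitted as a `--conf <converted key>=<value>` argument (assuming `CONF` is spark-submit's `--conf` flag). Below is a hedged, self-contained sketch of that assembly; the `BatchConfArgsSketch` name, the sample keys, and the values are illustrative and not taken from the real builder state.

```scala
// Hedged sketch of the builder loop after this change; "--conf" stands in for CONF,
// and the conf map is a made-up example rather than real Kyuubi state.
import scala.collection.mutable.ArrayBuffer

object BatchConfArgsSketch extends App {
  def convertConfigKey(key: String): String =
    if (key.startsWith("spark.")) key
    else if (key.startsWith("hadoop.")) "spark.hadoop." + key
    else "spark." + key

  val allConf = Map(
    "kyuubi.client.ipAddress" -> "10.0.0.1", // illustrative value only
    "spark.executor.memory" -> "4g")

  val buffer = new ArrayBuffer[String]()
  allConf.foreach { case (k, v) =>
    buffer += "--conf"
    buffer += s"${convertConfigKey(k)}=$v"
  }

  println(buffer.mkString(" "))
  // prints: --conf spark.kyuubi.client.ipAddress=10.0.0.1 --conf spark.executor.memory=4g
}
```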