Posted to commits@linkis.apache.org by pe...@apache.org on 2022/09/16 06:17:07 UTC

[incubator-linkis] branch dev-1.3.1 updated: Test case for spark executor (#3428)

This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.3.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git


The following commit(s) were added to refs/heads/dev-1.3.1 by this push:
     new 5cf03104d Test case for spark executor (#3428)
5cf03104d is described below

commit 5cf03104dc3117468e0c0042f19b9c3fd2dde154
Author: QuintinTao <72...@users.noreply.github.com>
AuthorDate: Fri Sep 16 14:17:00 2022 +0800

    Test case for spark executor (#3428)
---
 linkis-engineconn-plugins/spark/pom.xml            |   6 +
 .../spark/config/SparkConfiguration.scala          |   4 +-
 .../engineplugin/spark/cs/TestCSSparkHelper.scala  |  59 ++++++++++
 .../spark/cs/TestCSSparkPostExecutionHook.scala    |  74 +++++++++++++
 .../spark/executor/TestSparkSqlExecutor.scala      | 121 +++++++++++++++++++++
 5 files changed, 262 insertions(+), 2 deletions(-)

diff --git a/linkis-engineconn-plugins/spark/pom.xml b/linkis-engineconn-plugins/spark/pom.xml
index 93f16ff8c..78743bf93 100644
--- a/linkis-engineconn-plugins/spark/pom.xml
+++ b/linkis-engineconn-plugins/spark/pom.xml
@@ -377,6 +377,12 @@
       <version>3.0.9</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.github.pjfanning</groupId>
+      <artifactId>excel-streaming-reader</artifactId>
+      <version>4.0.1</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
index 5abd254bf..6faf30ce3 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/config/SparkConfiguration.scala
@@ -73,11 +73,11 @@ object SparkConfiguration extends Logging {
 
   val LINKIS_SPARK_USEHIVECONTEXT = CommonVars[Boolean]("wds.linkis.spark.useHiveContext", true)
 
-  val ENGINE_JAR = CommonVars[String]("wds.linkis.enginemanager.core.jar", getMainJarName)
-
   val DEFAULT_SPARK_JAR_NAME =
     CommonVars[String]("wds.linkis.ecp.spark.default.jar", "linkis-engineconn-core-1.3.0.jar")
 
+  val ENGINE_JAR = CommonVars[String]("wds.linkis.enginemanager.core.jar", getMainJarName)
+
   val SPARK_DRIVER_CLASSPATH = CommonVars[String]("spark.driver.extraClassPath", "")
 
   val SPARK_DRIVER_EXTRA_JAVA_OPTIONS = CommonVars[String](
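 
 [Note for readers skimming the diff: the SparkConfiguration change only reorders two vals, moving DEFAULT_SPARK_JAR_NAME above ENGINE_JAR. In a Scala object, vals initialize in declaration order, so a val whose default is computed by code that reads a later-declared val would observe null instead of the intended value; the reorder is consistent with getMainJarName depending on DEFAULT_SPARK_JAR_NAME, though the commit itself does not state the motivation. A minimal, hypothetical sketch of the pitfall (names are illustrative, not from the codebase):
 
     object InitOrderPitfall {
       // Runs while 'name' is still uninitialized (null), so this yields "hello, null".
       val greetingBefore: String = "hello, " + name
       val name: String = "linkis"
       // Declared after 'name', so this sees the assigned value: "hello, linkis".
       val greetingAfter: String = "hello, " + name
     }
 ]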
diff --git a/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/cs/TestCSSparkHelper.scala b/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/cs/TestCSSparkHelper.scala
new file mode 100644
index 000000000..8c3b8f44f
--- /dev/null
+++ b/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/cs/TestCSSparkHelper.scala
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineplugin.spark.cs
+
+import org.apache.linkis.common.utils.Utils
+import org.apache.linkis.cs.common.utils.CSCommonUtils
+import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext
+import org.apache.linkis.engineplugin.spark.entity.SparkEngineSession
+import org.apache.linkis.engineplugin.spark.executor.SparkScalaExecutor
+import org.apache.linkis.engineplugin.spark.factory.SparkEngineConnFactory
+
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.SparkSession
+
+import org.junit.jupiter.api.{Assertions, Test}
+
+class TestCSSparkHelper {
+
+  @Test
+  def testCSSparkHelper: Unit = {
+    val engineFactory = new SparkEngineConnFactory
+    val sparkConf: SparkConf = new SparkConf(true)
+    val sparkSession = SparkSession
+      .builder()
+      .master("local[1]")
+      .appName("test")
+      .getOrCreate()
+    val outputDir = engineFactory.createOutputDir(sparkConf)
+    val sparkEngineSession = SparkEngineSession(
+      sparkSession.sparkContext,
+      sparkSession.sqlContext,
+      sparkSession,
+      outputDir
+    )
+    val sparkScalaExecutor = new SparkScalaExecutor(sparkEngineSession, 1L)
+    Assertions.assertFalse(sparkScalaExecutor.isEngineInitialized)
+    sparkScalaExecutor.init()
+    Assertions.assertTrue(sparkScalaExecutor.isEngineInitialized)
+    val engineExecutionContext = new EngineExecutionContext(sparkScalaExecutor, Utils.getJvmUser)
+    CSSparkHelper.setContextIDInfoToSparkConf(engineExecutionContext, sparkSession.sparkContext)
+//    Assertions.assertNotNull(sparkSession.sparkContext.getLocalProperty(CSCommonUtils.CONTEXT_ID_STR))
+  }
+
+}
diff --git a/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/cs/TestCSSparkPostExecutionHook.scala b/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/cs/TestCSSparkPostExecutionHook.scala
new file mode 100644
index 000000000..5f1322938
--- /dev/null
+++ b/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/cs/TestCSSparkPostExecutionHook.scala
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineplugin.spark.cs
+
+import org.apache.linkis.common.io.FsPath
+import org.apache.linkis.common.utils.Utils
+import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext
+import org.apache.linkis.engineplugin.spark.entity.SparkEngineSession
+import org.apache.linkis.engineplugin.spark.executor.SparkScalaExecutor
+import org.apache.linkis.engineplugin.spark.factory.SparkEngineConnFactory
+
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.SparkSession
+
+import org.junit.jupiter.api.{Assertions, Test}
+
+class TestCSSparkPostExecutionHook {
+
+  @Test
+  def testCreateContext: Unit = {
+    val hook = new CSSparkPostExecutionHook
+    val hookPre = new CSSparkPreExecutionHook
+    val engineFactory = new SparkEngineConnFactory
+    val sparkConf: SparkConf = new SparkConf(true)
+    val path = this.getClass.getResource("/").getPath
+    System.setProperty("java.io.tmpdir", path)
+    System.setProperty("wds.linkis.filesystem.hdfs.root.path", path)
+    val sparkSession = SparkSession
+      .builder()
+      .master("local[1]")
+      .appName("test")
+      .getOrCreate()
+    val outputDir = engineFactory.createOutputDir(sparkConf)
+    val sparkEngineSession = SparkEngineSession(
+      sparkSession.sparkContext,
+      sparkSession.sqlContext,
+      sparkSession,
+      outputDir
+    )
+    val sparkScalaExecutor = new SparkScalaExecutor(sparkEngineSession, 1L)
+
+    Assertions.assertFalse(sparkScalaExecutor.isEngineInitialized)
+
+    if (!FsPath.WINDOWS) {
+      sparkScalaExecutor.init()
+      Assertions.assertTrue(sparkScalaExecutor.isEngineInitialized)
+      val engineExecutionContext = new EngineExecutionContext(sparkScalaExecutor, Utils.getJvmUser)
+      val code = "val dataFrame = spark.createDataFrame(Seq(\n      " +
+        "(\"ming\", 20, 15552211521L),\n      " +
+        "(\"hong\", 19, 13287994007L),\n      " +
+        "(\"zhi\", 21, 15552211523L)\n    )).toDF(\"name\", \"age\", \"phone\") \n" +
+        "dataFrame.show()\n";
+      hookPre.callPreExecutionHook(engineExecutionContext, code)
+      val response = sparkScalaExecutor.executeLine(engineExecutionContext, code)
+      hook.callPostExecutionHook(engineExecutionContext, response, code)
+    }
+  }
+
+}
diff --git a/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/executor/TestSparkSqlExecutor.scala b/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/executor/TestSparkSqlExecutor.scala
new file mode 100644
index 000000000..4b627bba7
--- /dev/null
+++ b/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/executor/TestSparkSqlExecutor.scala
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineplugin.spark.executor
+
+import org.apache.linkis.DataWorkCloudApplication
+import org.apache.linkis.common.conf.DWCArgumentsParser
+import org.apache.linkis.common.io.FsPath
+import org.apache.linkis.common.utils.Utils
+import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext
+import org.apache.linkis.engineplugin.spark.entity.SparkEngineSession
+import org.apache.linkis.engineplugin.spark.factory.SparkEngineConnFactory
+
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.SparkSession
+
+import scala.collection.mutable
+
+import org.junit.jupiter.api.{Assertions, Test}
+
+class TestSparkSqlExecutor {
+
+  def initService(port: String): Unit = {
+    System.setProperty("wds.linkis.server.version", "v1")
+    System.setProperty(
+      "wds.linkis.engineconn.plugin.default.class",
+      "org.apache.linkis.engineplugin.spark.SparkEngineConnPlugin"
+    )
+    val map = new mutable.HashMap[String, String]()
+    map.put("spring.mvc.servlet.path", "/api/rest_j/v1")
+    map.put("server.port", port)
+    map.put("spring.application.name", "SparkSqlExecutor")
+    map.put("eureka.client.register-with-eureka", "false")
+    map.put("eureka.client.fetch-registry", "false")
+    DataWorkCloudApplication.main(DWCArgumentsParser.formatSpringOptions(map.toMap))
+  }
+
+  @Test
+  def testCreateContext: Unit = {
+    initService("26378")
+    val engineFactory = new SparkEngineConnFactory
+    val sparkConf = new SparkConf(true)
+    val path = this.getClass.getResource("/").getPath
+    System.setProperty("java.io.tmpdir", path)
+    val sparkSession = SparkSession
+      .builder()
+      .master("local[*]")
+      .appName("testSparkSqlExecutor")
+      .getOrCreate()
+    val outputDir = engineFactory.createOutputDir(sparkConf)
+    val sparkEngineSession = SparkEngineSession(
+      sparkSession.sparkContext,
+      sparkSession.sqlContext,
+      sparkSession,
+      outputDir
+    )
+    val sparkSqlExecutor = new SparkSqlExecutor(sparkEngineSession, 1L)
+    Assertions.assertFalse(sparkSqlExecutor.isEngineInitialized)
+    sparkSqlExecutor.init()
+    Assertions.assertTrue(sparkSqlExecutor.isEngineInitialized)
+    val engineExecutionContext = new EngineExecutionContext(sparkSqlExecutor, Utils.getJvmUser)
+    val code = "select * from temp"
+    val response = sparkSqlExecutor.executeLine(engineExecutionContext, code)
+    Assertions.assertNotNull(response)
+  }
+
+  @Test
+  def testShowDF: Unit = {
+    if (!FsPath.WINDOWS) {
+      initService("26379")
+      val engineFactory = new SparkEngineConnFactory
+      val sparkConf: SparkConf = new SparkConf(true)
+      val path = this.getClass.getResource("/").getPath
+      System.setProperty("HADOOP_CONF_DIR", path)
+      System.setProperty("wds.linkis.filesystem.hdfs.root.path", path)
+      System.setProperty("java.io.tmpdir", path)
+      val sparkSession = SparkSession
+        .builder()
+        .master("local[1]")
+        .appName("testShowDF")
+        .getOrCreate()
+      val outputDir = engineFactory.createOutputDir(sparkConf)
+      val sparkEngineSession = SparkEngineSession(
+        sparkSession.sparkContext,
+        sparkSession.sqlContext,
+        sparkSession,
+        outputDir
+      )
+      val sparkScalaExecutor = new SparkScalaExecutor(sparkEngineSession, 1L)
+      val engineExecutionContext = new EngineExecutionContext(sparkScalaExecutor, Utils.getJvmUser)
+      val dataFrame = sparkSession
+        .createDataFrame(
+          Seq(("ming", 20, 15552211521L), ("hong", 19, 13287994007L), ("zhi", 21, 15552211523L))
+        )
+        .toDF("name", "age", "phone")
+      SQLSession.showDF(
+        sparkSession.sparkContext,
+        "test",
+        dataFrame,
+        "",
+        10,
+        engineExecutionContext
+      )
+    }
+  }
+
+}
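 
 [Note: the three new tests share one scaffold: start a local SparkSession, create an output directory via SparkEngineConnFactory, wrap everything in a SparkEngineSession, construct an executor, init() it, and drive it through an EngineExecutionContext. Distilled into a minimal sketch that uses only APIs appearing in this diff (the query string and the executor id are arbitrary, as they are in the tests):
 
     import org.apache.linkis.common.utils.Utils
     import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext
     import org.apache.linkis.engineplugin.spark.entity.SparkEngineSession
     import org.apache.linkis.engineplugin.spark.executor.SparkSqlExecutor
     import org.apache.linkis.engineplugin.spark.factory.SparkEngineConnFactory
     import org.apache.spark.SparkConf
     import org.apache.spark.sql.SparkSession
 
     // Local Spark session, as in the tests above.
     val spark = SparkSession.builder().master("local[1]").appName("scaffold").getOrCreate()
     // Output directory and engine session, built the same way the tests build them.
     val outputDir = new SparkEngineConnFactory().createOutputDir(new SparkConf(true))
     val session = SparkEngineSession(spark.sparkContext, spark.sqlContext, spark, outputDir)
     // Executor lifecycle: construct, init, then execute a line of SQL.
     val executor = new SparkSqlExecutor(session, 1L)
     executor.init()
     val ctx = new EngineExecutionContext(executor, Utils.getJvmUser)
     val response = executor.executeLine(ctx, "select 1")
 ]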

