You are viewing a plain text version of this content; the canonical (HTML) version contains the original hyperlink to this message in the mailing-list archive.
Posted to commits@kylin.apache.org by xx...@apache.org on 2022/08/23 09:50:17 UTC

[kylin] branch kylin5 updated: KYLIN-5234 support Kylin 5.0 on hadoop 3.2.1

This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/kylin5 by this push:
     new b24d97c64f KYLIN-5234 support Kylin 5.0 on hadoop 3.2.1
b24d97c64f is described below

commit b24d97c64f7ff0a32046d9e32201de75bd4fec8c
Author: Mukvin <bo...@163.com>
AuthorDate: Tue Aug 23 16:02:02 2022 +0800

    KYLIN-5234 support Kylin 5.0 on hadoop 3.2.1
---
 build/apache_release/build.sh                                  |  3 +--
 build/sbin/spark-test.sh                                       |  8 ++++----
 .../kylin/rest/service/task/QueryHistoryTaskScheduler.java     |  2 +-
 .../src/main/java/org/apache/kylin/common/KylinConfigExt.java  |  5 +++--
 src/spark-project/engine-build-sdk/pom.xml                     |  4 ++--
 .../query/asyncprofiler/AsyncProfilerExecutorPlugin.scala      | 10 +++-------
 src/spark-project/spark-common/pom.xml                         |  4 ++--
 7 files changed, 16 insertions(+), 20 deletions(-)

diff --git a/build/apache_release/build.sh b/build/apache_release/build.sh
index 35e635bde3..98c47f066c 100755
--- a/build/apache_release/build.sh
+++ b/build/apache_release/build.sh
@@ -22,8 +22,7 @@ dir=$(dirname ${0})
 cd ${dir}/../..
 
 echo 'Build back-end'
-echo "-----SKIP------"
-## mvn clean install -DskipTests -Dcheckstyle.skip $@ || { exit 1; }
+mvn clean install -DskipTests $@ || { exit 1; }
 
 #package webapp
 echo 'Build front-end'
diff --git a/build/sbin/spark-test.sh b/build/sbin/spark-test.sh
index e74f59987a..f338d477ed 100755
--- a/build/sbin/spark-test.sh
+++ b/build/sbin/spark-test.sh
@@ -215,18 +215,18 @@ then
     [[ ! -f ${full_input_file} ]] || rm -f ${full_input_file}
     echo "Hello Spark Client" >> ${full_input_file};
 
-    hadoop ${KYLIN_HADOOP_PARAM} fs -put -f ${full_input_file} ${KAP_WORKING_DIR}
+    hadoop ${KYLIN_HADOOP_PARAM} fs -put -f ${full_input_file} ${KYLIN_WORKING_DIR}
 
     spark_submit='$SPARK_HOME/bin/spark-submit '
-    spark_submit_conf=' --class org.apache.kylin.tool.setup.KapSparkTaskTestCLI --name Test  $KYLIN_SPARK_TEST_JAR_PATH ${KAP_WORKING_DIR}/${input_file} '
+    spark_submit_conf=' --class org.apache.kylin.tool.setup.KapSparkTaskTestCLI --name Test  $KYLIN_SPARK_TEST_JAR_PATH ${KYLIN_WORKING_DIR}/${input_file} '
     submitCommand=${spark_submit}${confStr}${spark_submit_conf}
     verbose "The submit command is: $submitCommand"
     eval $submitCommand
     if [ $? == 0 ];then
-        hadoop ${KYLIN_HADOOP_PARAM} fs -rm -r -skipTrash ${KAP_WORKING_DIR}/${input_file}
+        hadoop ${KYLIN_HADOOP_PARAM} fs -rm -r -skipTrash ${KYLIN_WORKING_DIR}/${input_file}
         rm -rf ${full_input_file}
     else
-        hadoop ${KYLIN_HADOOP_PARAM} fs -rm -r -skipTrash ${KAP_WORKING_DIR}/${input_file}
+        hadoop ${KYLIN_HADOOP_PARAM} fs -rm -r -skipTrash ${KYLIN_WORKING_DIR}/${input_file}
         rm -rf ${full_input_file}
         quit "ERROR: Test of submitting spark job failed,error when testing spark with spark configurations in Kyligence Enterprise!"
     fi
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java
index 3a3feabbce..4219fc538c 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java
@@ -94,7 +94,7 @@ public class QueryHistoryTaskScheduler {
         queryHistoryAccelerateRunner = new QueryHistoryAccelerateRunner(false);
         queryHistoryMetaUpdateRunner = new QueryHistoryMetaUpdateRunner();
         if (querySmartSupporter == null && SpringContext.getApplicationContext() != null
-                && !KylinConfig.vendor().equals("asf")) {
+                && !KylinConfig.vendor().equals("kylin")) {
             querySmartSupporter = SpringContext.getBean(QuerySmartSupporter.class);
         }
         log.debug("New QueryHistoryAccelerateScheduler created by project {}", project);
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigExt.java b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigExt.java
index c0774df6a4..3f9a148656 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigExt.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigExt.java
@@ -80,10 +80,11 @@ public class KylinConfigExt extends KylinConfig {
     @Override
     public String getOptional(String prop, String dft) {
         String value = overrides.get(prop);
-        if (value != null)
+        if (value != null) {
             return getSubstitutor().replace(value);
-        else
+        } else {
             return super.getOptional(prop, dft);
+        }
     }
 
     @Override
diff --git a/src/spark-project/engine-build-sdk/pom.xml b/src/spark-project/engine-build-sdk/pom.xml
index aa2a3696bb..fa92e05ded 100644
--- a/src/spark-project/engine-build-sdk/pom.xml
+++ b/src/spark-project/engine-build-sdk/pom.xml
@@ -10,7 +10,7 @@
         <relativePath>../../../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
-
+    <name>Kylin - Engine Build SDK</name>
     <artifactId>kylin-engine-build-sdk</artifactId>
 
 
@@ -57,4 +57,4 @@
 
     </dependencies>
 
-</project>
\ No newline at end of file
+</project>
diff --git a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/asyncprofiler/AsyncProfilerExecutorPlugin.scala b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/asyncprofiler/AsyncProfilerExecutorPlugin.scala
index 6eaf6c2e71..8c1e180ec6 100644
--- a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/asyncprofiler/AsyncProfilerExecutorPlugin.scala
+++ b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/asyncprofiler/AsyncProfilerExecutorPlugin.scala
@@ -19,11 +19,10 @@
 package org.apache.kylin.query.asyncprofiler
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder
-import org.apache.kylin.common.KylinConfig
 import org.apache.spark.api.plugin.{ExecutorPlugin, PluginContext}
 import org.apache.spark.internal.Logging
 
-import java.util
+import java.util.Map
 import java.util.concurrent.{Executors, TimeUnit}
 
 class AsyncProfilerExecutorPlugin extends ExecutorPlugin with Logging {
@@ -31,12 +30,11 @@ class AsyncProfilerExecutorPlugin extends ExecutorPlugin with Logging {
   private val checkingInterval: Long = 1000
   private var ctx: PluginContext = _
   private var dumped = false
-  private val DEBUG = KylinConfig.getInstanceFromEnv.isUTEnv
 
   private val scheduledExecutorService = Executors.newScheduledThreadPool(1,
     new ThreadFactoryBuilder().setDaemon(true).setNameFormat("profiler-%d").build())
 
-  override def init(_ctx: PluginContext, extraConf: util.Map[String, String]): Unit = {
+  override def init(_ctx: PluginContext, extraConf: Map[String, String]): Unit = {
     ctx = _ctx
     val profile = new Runnable {
       override def run(): Unit = checkAndProfile()
@@ -65,9 +63,7 @@ class AsyncProfilerExecutorPlugin extends ExecutorPlugin with Logging {
         case _ =>
       }
     } catch {
-      case e: Exception => if (!DEBUG) {
-        logInfo("error while communication/profiling", e)
-      }
+      case e: Exception => logInfo("error while communication/profiling", e)
     }
   }
 
diff --git a/src/spark-project/spark-common/pom.xml b/src/spark-project/spark-common/pom.xml
index 41f220ab3b..22e880418a 100644
--- a/src/spark-project/spark-common/pom.xml
+++ b/src/spark-project/spark-common/pom.xml
@@ -22,7 +22,7 @@
     <modelVersion>4.0.0</modelVersion>
     <name>Kylin - Spark Common</name>
     <url>http://kylin.apache.org</url>
-    <description>Kylin  Job Engine - SPARK</description>
+    <description>Kylin Job Engine - SPARK</description>
 
     <artifactId>kylin-spark-common</artifactId>
 
@@ -160,4 +160,4 @@
             </plugin>
         </plugins>
     </build>
-</project>
\ No newline at end of file
+</project>