You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kyuubi.apache.org by fe...@apache.org on 2023/01/12 12:58:58 UTC

[kyuubi] branch master updated: [KYUUBI #4150] Support to execute Scala statement synchronized to prevent conflicts

This is an automated email from the ASF dual-hosted git repository.

feiwang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 466916317 [KYUUBI #4150] Support to execute Scala statement synchronized to prevent conflicts
466916317 is described below

commit 4669163176c71f9ecce5b8bd238ff61d73e59e23
Author: fwang12 <fw...@ebay.com>
AuthorDate: Thu Jan 12 20:58:49 2023 +0800

    [KYUUBI #4150] Support to execute Scala statement synchronized to prevent conflicts
    
    ### _Why are the changes needed?_
    
    Support executing Scala statements under synchronization to prevent conflicts, because they share the same `spark.repl.class.outputDir`.
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #4150 from turboFei/lock_scala.
    
    Closes #4150
    
    f11f12a26 [fwang12] lock more
    d7a9fe8ed [fwang12] remove conf
    d4175827e [fwang12] update docs
    c1524a7fc [fwang12] lock required
    a6e663be7 [fwang12] lock scala
    
    Authored-by: fwang12 <fw...@ebay.com>
    Signed-off-by: fwang12 <fw...@ebay.com>
---
 .../apache/kyuubi/engine/spark/repl/KyuubiSparkILoop.scala | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/repl/KyuubiSparkILoop.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/repl/KyuubiSparkILoop.scala
index c5f25a834..27090fae4 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/repl/KyuubiSparkILoop.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/repl/KyuubiSparkILoop.scala
@@ -18,6 +18,7 @@
 package org.apache.kyuubi.engine.spark.repl
 
 import java.io.{ByteArrayOutputStream, File}
+import java.util.concurrent.locks.ReentrantLock
 
 import scala.tools.nsc.Settings
 import scala.tools.nsc.interpreter.IR
@@ -32,10 +33,11 @@ private[spark] case class KyuubiSparkILoop private (
     spark: SparkSession,
     output: ByteArrayOutputStream)
   extends SparkILoop(None, new JPrintWriter(output)) {
+  import KyuubiSparkILoop._
 
   val result = new DataFrameHolder(spark)
 
-  private def initialize(): Unit = {
+  private def initialize(): Unit = withLockRequired {
     settings = new Settings
     val interpArguments = List(
       "-Yrepl-class-based",
@@ -98,7 +100,7 @@ private[spark] case class KyuubiSparkILoop private (
 
   def clearResult(statementId: String): Unit = result.unset(statementId)
 
-  def interpretWithRedirectOutError(statement: String): IR.Result = {
+  def interpretWithRedirectOutError(statement: String): IR.Result = withLockRequired {
     Console.withOut(output) {
       Console.withErr(output) {
         this.interpret(statement)
@@ -120,4 +122,12 @@ private[spark] object KyuubiSparkILoop {
     iLoop.initialize()
     iLoop
   }
+
+  private val lock = new ReentrantLock()
+  private def withLockRequired[T](block: => T): T = {
+    try {
+      lock.lock()
+      block
+    } finally lock.unlock()
+  }
 }