Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2019/03/01 05:37:48 UTC

[GitHub] hustfeiwang commented on a change in pull request #23276: [SPARK-26321][SQL] Improve the behavior of sql text splitting for the spark-sql command line

URL: https://github.com/apache/spark/pull/23276#discussion_r261473156
 
 

 ##########
 File path: sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
 ##########
 @@ -331,6 +337,65 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
     console.printInfo(s"Spark master: $master, Application Id: $appId")
   }
 
+  // method body imported from Hive and translated from Java to Scala
+  override def processLine(line: String, allowInterrupting: Boolean): Int = {
+    var oldSignal: SignalHandler = null
+    var interruptSignal: Signal = null
+
+    if (allowInterrupting) {
+      // Remember all threads that were running at the time we started line processing.
+      // Hook up the custom Ctrl+C handler while processing this line
+      interruptSignal = new Signal("INT")
+      oldSignal = Signal.handle(interruptSignal, new SignalHandler() {
+        private val cliThread = Thread.currentThread()
+        private var interruptRequested: Boolean = false
+
+        override def handle(signal: Signal) {
+          val initialRequest = !interruptRequested
+          interruptRequested = true
+
+          // Kill the VM on second ctrl+c
+          if (!initialRequest) {
+            console.printInfo("Exiting the JVM")
+            System.exit(127)
+          }
+
+          // Interrupt the CLI thread to stop the current statement and return
+          // to prompt
+          console.printInfo("Interrupting... Be patient, this might take some time.")
+          console.printInfo("Press Ctrl+C again to kill JVM")
+
+          // First, kill any running Spark jobs
+          // TODO
 
 Review comment:
   I think `HiveInterruptUtils.interrupt()` should be added here.
   SparkSQLCLIDriver has already invoked `installSignalHandler()` to register a `HiveInterruptCallback`, which cancels all Spark jobs when `HiveInterruptUtils.interrupt()` is called.
   See https://github.com/apache/spark/blob/bc7592ba1186001127ecfae327ac22a0727b8bab/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala#L67
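   A minimal sketch of what that could look like in place of the `// TODO` above (assuming the rest of the handler body from the diff stays unchanged, and that `HiveInterruptUtils` from `org.apache.hadoop.hive.common` is already among SparkSQLCLIDriver's imports):

   ```scala
   import org.apache.hadoop.hive.common.HiveInterruptUtils

   // ... inside SignalHandler.handle, replacing the "// TODO" marker:

   // First, cancel any running Spark jobs. This fires the HiveInterruptCallback
   // registered by installSignalHandler() (see the link above), which cancels
   // the active jobs on the CLI's SparkContext.
   HiveInterruptUtils.interrupt()
   ```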

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org