You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2014/01/08 06:32:24 UTC
[2/4] git commit: Added ‘-i’ command line option to spark REPL. We had to create a new implementation of both scala.tools.nsc.CompilerCommand and scala.tools.nsc.Settings, because using scala.tools.nsc.GenericRunnerSettings would bring in other options (
Added ‘-i’ command line option to spark REPL.
We had to create a new implementation of both scala.tools.nsc.CompilerCommand and scala.tools.nsc.Settings, because using scala.tools.nsc.GenericRunnerSettings would bring in other options (-howtorun, -save and -execute) which don't make sense in Spark.
Any new Spark specific command line option could now be added to org.apache.spark.repl.SparkRunnerSettings class.
Since the behavior of loading a script from the command line should be the same as loading it using the ":load" command inside the shell, the script should be loaded when the SparkContext is available; that's why we had to move the call to 'loadFiles(settings)' _after_ the call to postInitialization(). This still doesn't work if 'isAsync = true'.
Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/0b6db8c1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/0b6db8c1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/0b6db8c1
Branch: refs/heads/master
Commit: 0b6db8c186183704feafd26b454fff58a0e31861
Parents: 87248bd
Author: Luca Rosellini <lr...@paradigmatecnologico.com>
Authored: Fri Jan 3 12:57:06 2014 +0100
Committer: Luca Rosellini <lr...@paradigmatecnologico.com>
Committed: Fri Jan 3 12:57:06 2014 +0100
----------------------------------------------------------------------
.../apache/spark/repl/SparkCommandLine.scala | 22 ++++++++++++++++++++
.../org/apache/spark/repl/SparkILoop.scala | 7 ++++---
.../apache/spark/repl/SparkRunnerSettings.scala | 17 +++++++++++++++
3 files changed, 43 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0b6db8c1/repl/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala
----------------------------------------------------------------------
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala b/repl/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala
new file mode 100644
index 0000000..acb1e4c
--- /dev/null
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala
@@ -0,0 +1,22 @@
+package org.apache.spark.repl
+
+import scala.tools.nsc.{Settings, CompilerCommand}
+import scala.Predef._
+
+/**
+ * Command class enabling Spark-specific command line options (provided by
+ * <i>org.apache.spark.repl.SparkRunnerSettings</i>).
+ *
+ * @author Luca Rosellini <lu...@stratio.com>
+ */
+class SparkCommandLine(args: List[String], override val settings: Settings)
+  extends CompilerCommand(args, settings) {
+
+  // Parses `args` against Spark's own settings class so that REPL-specific
+  // flags (e.g. -i, see SparkRunnerSettings) are recognized instead of the
+  // extra options GenericRunnerSettings would pull in. `error` receives
+  // option-parsing error messages.
+  def this(args: List[String], error: String => Unit) {
+    this(args, new SparkRunnerSettings(error))
+  }
+
+  // Convenience constructor: report option-parsing errors to the console.
+  def this(args: List[String]) {
+    this(args, str => Console.println("Error: " + str))
+  }
+}
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0b6db8c1/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
----------------------------------------------------------------------
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index f108c70..14c3feb 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -835,7 +835,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
// runs :load `file` on any files passed via -i
def loadFiles(settings: Settings) = settings match {
- case settings: GenericRunnerSettings =>
+ case settings: SparkRunnerSettings =>
for (filename <- settings.loadfiles.value) {
val cmd = ":load " + filename
command(cmd)
@@ -902,7 +902,6 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
addThunk(printWelcome())
addThunk(initializeSpark())
- loadFiles(settings)
// it is broken on startup; go ahead and exit
if (intp.reporter.hasErrors)
return false
@@ -922,6 +921,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
}
// printWelcome()
+ loadFiles(settings)
+
try loop()
catch AbstractOrMissingHandler()
finally closeInterpreter()
@@ -955,7 +956,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
/** process command-line arguments and do as they request */
def process(args: Array[String]): Boolean = {
- val command = new CommandLine(args.toList, echo)
+ val command = new SparkCommandLine(args.toList, msg => echo(msg))
def neededHelp(): String =
(if (command.settings.help.value) command.usageMsg + "\n" else "") +
(if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0b6db8c1/repl/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala
----------------------------------------------------------------------
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala b/repl/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala
new file mode 100644
index 0000000..235a688
--- /dev/null
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala
@@ -0,0 +1,17 @@
+package org.apache.spark.repl
+
+import scala.tools.nsc.Settings
+
+/**
+ * <i>scala.tools.nsc.Settings</i> implementation adding Spark-specific REPL
+ * command line options.
+ *
+ * @author Luca Rosellini <lu...@stratio.com>
+ */
+class SparkRunnerSettings(error: String => Unit) extends Settings(error){
+
+  // -i <file>: script(s) to be run via ":load" once the shell is initialized
+  // (see SparkILoop.loadFiles). MultiStringSetting allows the option to be
+  // repeated to load several files.
+  val loadfiles = MultiStringSetting(
+    "-i",
+    "file",
+    "load a file (assumes the code is given interactively)")
+}