You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sr...@apache.org on 2016/03/28 13:03:14 UTC
spark git commit: [SPARK-14102][CORE] Block `reset` command in
SparkShell
Repository: spark
Updated Branches:
refs/heads/master 7b8415401 -> b66aa9006
[SPARK-14102][CORE] Block `reset` command in SparkShell
## What changes were proposed in this pull request?
Spark Shell provides an easy way to use Spark in Scala environment. This PR adds `reset` command to a blocked list, also cleaned up according to the Scala coding style.
```scala
scala> sc
res0: org.apache.spark.SparkContext = org.apache.spark.SparkContext@718fad24
scala> :reset
scala> sc
<console>:11: error: not found: value sc
sc
^
```
If we block `reset`, Spark Shell works like the following.
```scala
scala> :reset
reset: no such command. Type :help for help.
scala> :re
re is ambiguous: did you mean :replay or :require?
```
## How was this patch tested?
Manual. Run `bin/spark-shell` and type `:reset`.
Author: Dongjoon Hyun <do...@apache.org>
Closes #11920 from dongjoon-hyun/SPARK-14102.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b66aa900
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b66aa900
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b66aa900
Branch: refs/heads/master
Commit: b66aa900619a86b7acbb7c3f96abc96ea2faa53c
Parents: 7b84154
Author: Dongjoon Hyun <do...@apache.org>
Authored: Mon Mar 28 12:04:21 2016 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Mon Mar 28 12:04:21 2016 +0100
----------------------------------------------------------------------
.../scala/org/apache/spark/repl/SparkILoop.scala | 17 +++++++----------
1 file changed, 7 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/b66aa900/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 7ed6d3b..db09d6a 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -19,12 +19,11 @@ package org.apache.spark.repl
import java.io.BufferedReader
-import Predef.{println => _, _}
-import scala.util.Properties.{javaVersion, versionString, javaVmName}
-
-import scala.tools.nsc.interpreter.{JPrintWriter, ILoop}
+import scala.Predef.{println => _, _}
import scala.tools.nsc.Settings
+import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}
import scala.tools.nsc.util.stringFromStream
+import scala.util.Properties.{javaVersion, javaVmName, versionString}
/**
* A Spark-specific interactive shell.
@@ -75,11 +74,9 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
echo("Type :help for more information.")
}
- import LoopCommand.{ cmd, nullary }
-
- private val blockedCommands = Set("implicits", "javap", "power", "type", "kind")
+ private val blockedCommands = Set("implicits", "javap", "power", "type", "kind", "reset")
- /** Standard commands **/
+ /** Standard commands */
lazy val sparkStandardCommands: List[SparkILoop.this.LoopCommand] =
standardCommands.filter(cmd => !blockedCommands(cmd.name))
@@ -112,9 +109,9 @@ object SparkILoop {
val output = new JPrintWriter(new OutputStreamWriter(ostream), true)
val repl = new SparkILoop(input, output)
- if (sets.classpath.isDefault)
+ if (sets.classpath.isDefault) {
sets.classpath.value = sys.props("java.class.path")
-
+ }
repl process sets
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org