You are viewing a plain text version of this content; the canonical (HTML) version is available at the original mailing-list archive.
Posted to commits@spark.apache.org by pw...@apache.org on 2014/07/21 20:54:44 UTC
git commit: Revert "[SPARK-1199][REPL] Remove VALId and use the
original import style for defined classes."
Repository: spark
Updated Branches:
refs/heads/branch-1.0 480669f2b -> e0cc3843d
Revert "[SPARK-1199][REPL] Remove VALId and use the original import style for defined classes."
This reverts commit 6e0b7e5308263bef60120debe05577868ebaeea9.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e0cc3843
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e0cc3843
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e0cc3843
Branch: refs/heads/branch-1.0
Commit: e0cc3843dedeeb7840c17b7b487e4d8f31a2d320
Parents: 480669f
Author: Patrick Wendell <pw...@gmail.com>
Authored: Mon Jul 21 11:53:57 2014 -0700
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Mon Jul 21 11:54:38 2014 -0700
----------------------------------------------------------------------
.../org/apache/spark/repl/SparkIMain.scala | 7 ++----
.../org/apache/spark/repl/SparkImports.scala | 23 ++++++++------------
.../scala/org/apache/spark/repl/ReplSuite.scala | 12 ----------
3 files changed, 11 insertions(+), 31 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/e0cc3843/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
----------------------------------------------------------------------
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
index 3842c29..7c83fa9 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
@@ -744,7 +744,7 @@ import org.apache.spark.util.Utils
*
* Read! Eval! Print! Some of that not yet centralized here.
*/
- class ReadEvalPrint(val lineId: Int) {
+ class ReadEvalPrint(lineId: Int) {
def this() = this(freshLineId())
private var lastRun: Run = _
@@ -1241,10 +1241,7 @@ import org.apache.spark.util.Utils
// old style
beSilentDuring(parse(code)) foreach { ts =>
ts foreach { t =>
- if (isShow || isShowRaw)
- withoutUnwrapping(echo(asCompactString(t)))
- else
- withoutUnwrapping(logDebug(asCompactString(t)))
+ withoutUnwrapping(logDebug(asCompactString(t)))
}
}
}
http://git-wip-us.apache.org/repos/asf/spark/blob/e0cc3843/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
----------------------------------------------------------------------
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
index bce5c74..419796b 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
@@ -182,26 +182,15 @@ trait SparkImports {
// ambiguity errors will not be generated. Also, quote
// the name of the variable, so that we don't need to
// handle quoting keywords separately.
- case x: ClassHandler =>
- // I am trying to guess if the import is a defined class
- // This is an ugly hack, I am not 100% sure of the consequences.
- // Here we, let everything but "defined classes" use the import with val.
- // The reason for this is, otherwise the remote executor tries to pull the
- // classes involved and may fail.
- for (imv <- x.definedNames) {
- val objName = req.lineRep.readPath
- code.append("import " + objName + ".INSTANCE" + req.accessPath + ".`" + imv + "`\n")
- }
-
case x =>
for (imv <- x.definedNames) {
if (currentImps contains imv) addWrapper()
val objName = req.lineRep.readPath
- val valName = "$VAL" + req.lineRep.lineId
+ val valName = "$VAL" + newValId();
if(!code.toString.endsWith(".`" + imv + "`;\n")) { // Which means already imported
- code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
- code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
+ code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
+ code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
}
// code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
// code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
@@ -222,4 +211,10 @@ trait SparkImports {
private def membersAtPickler(sym: Symbol): List[Symbol] =
beforePickler(sym.info.nonPrivateMembers.toList)
+ private var curValId = 0
+
+ private def newValId(): Int = {
+ curValId += 1
+ curValId
+ }
}
http://git-wip-us.apache.org/repos/asf/spark/blob/e0cc3843/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
----------------------------------------------------------------------
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index edd5243..95e1793 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -225,18 +225,6 @@ class ReplSuite extends FunSuite {
assertContains("res4: Array[Int] = Array(0, 0, 0, 0, 0)", output)
}
- test("SPARK-1199-simple-reproduce") {
- val output = runInterpreter("local-cluster[1,1,512]",
- """
- |case class Sum(exp: String, exp2: String)
- |val a = Sum("A", "B")
- |def b(a: Sum): String = a match { case Sum(_, _) => "Found Sum" }
- |b(a)
- """.stripMargin)
- assertDoesNotContain("error:", output)
- assertDoesNotContain("Exception", output)
- }
-
if (System.getenv("MESOS_NATIVE_LIBRARY") != null) {
test("running on Mesos") {
val output = runInterpreter("localquiet",