author    Patrick Wendell <pwendell@gmail.com>    2014-07-21 11:53:57 -0700
committer Patrick Wendell <pwendell@gmail.com>    2014-07-21 11:54:38 -0700
commit    e0cc3843dedeeb7840c17b7b487e4d8f31a2d320 (patch)
tree      25c7887f30e3a950db778cac996ca689cc2dac9f /repl
parent    480669f2b2a4aea5c93ba8403fdf98894acdd87a (diff)
Revert "[SPARK-1199][REPL] Remove VALId and use the original import style for defined classes."
This reverts commit 6e0b7e5308263bef60120debe05577868ebaeea9.
Diffstat (limited to 'repl')
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala    |  7
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkImports.scala  | 23
-rw-r--r--  repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala     | 12
3 files changed, 11 insertions, 31 deletions
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
index 3842c291d0..7c83fa9d4d 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
@@ -744,7 +744,7 @@ import org.apache.spark.util.Utils
*
* Read! Eval! Print! Some of that not yet centralized here.
*/
- class ReadEvalPrint(val lineId: Int) {
+ class ReadEvalPrint(lineId: Int) {
def this() = this(freshLineId())
private var lastRun: Run = _
@@ -1241,10 +1241,7 @@ import org.apache.spark.util.Utils
// old style
beSilentDuring(parse(code)) foreach { ts =>
ts foreach { t =>
- if (isShow || isShowRaw)
- withoutUnwrapping(echo(asCompactString(t)))
- else
- withoutUnwrapping(logDebug(asCompactString(t)))
+ withoutUnwrapping(logDebug(asCompactString(t)))
}
}
}
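For context on the one-word change in the hunk above: dropping `val` from the ReadEvalPrint constructor parameter removes the public accessor for lineId while keeping it usable inside the class. A minimal sketch (not Spark code; the class names below are illustrative) of that difference in Scala:

// With `val`, the constructor parameter becomes a public member;
// without it, the parameter is only visible inside the class body.
class WithVal(val lineId: Int)
class WithoutVal(lineId: Int) {
  def show(): Int = lineId // still usable internally
}

object CtorParamExample extends App {
  println(new WithVal(1).lineId)      // compiles: lineId is a public accessor
  // println(new WithoutVal(2).lineId) // would not compile: no such member
  println(new WithoutVal(2).show())
}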
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
index bce5c74b9d..419796b68b 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
@@ -182,26 +182,15 @@ trait SparkImports {
// ambiguity errors will not be generated. Also, quote
// the name of the variable, so that we don't need to
// handle quoting keywords separately.
- case x: ClassHandler =>
- // I am trying to guess if the import is a defined class
- // This is an ugly hack, I am not 100% sure of the consequences.
- // Here we, let everything but "defined classes" use the import with val.
- // The reason for this is, otherwise the remote executor tries to pull the
- // classes involved and may fail.
- for (imv <- x.definedNames) {
- val objName = req.lineRep.readPath
- code.append("import " + objName + ".INSTANCE" + req.accessPath + ".`" + imv + "`\n")
- }
-
case x =>
for (imv <- x.definedNames) {
if (currentImps contains imv) addWrapper()
val objName = req.lineRep.readPath
- val valName = "$VAL" + req.lineRep.lineId
+ val valName = "$VAL" + newValId();
if(!code.toString.endsWith(".`" + imv + "`;\n")) { // Which means already imported
- code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
- code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
+ code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
+ code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
}
// code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
// code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
@@ -222,4 +211,10 @@ trait SparkImports {
private def membersAtPickler(sym: Symbol): List[Symbol] =
beforePickler(sym.info.nonPrivateMembers.toList)
+ private var curValId = 0
+
+ private def newValId(): Int = {
+ curValId += 1
+ curValId
+ }
}
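The SparkImports hunk above restores the per-import `$VAL` wrapping style (and drops the removed special case that imported defined classes directly from the line wrapper instance). A minimal, self-contained sketch of what the generated import preamble looks like under the restored scheme; the objName, accessPath, and definedNames values below are hypothetical placeholders, not Spark's actual internals:

// Sketch of the $VAL-per-import code generation, assuming illustrative
// values for the previous line's read path, access path, and defined names.
object ImportWrapperSketch extends App {
  private var curValId = 0
  private def newValId(): Int = { curValId += 1; curValId }

  val code = new StringBuilder
  val objName = "$line3.$read"       // hypothetical read path of a previous REPL line
  val accessPath = ".$iw.$iw"        // hypothetical access path into the line wrapper
  val definedNames = Seq("x", "Sum") // names the previous line defined

  for (imv <- definedNames) {
    val valName = "$VAL" + newValId()
    if (!code.toString.endsWith(".`" + imv + "`;\n")) { // skip if already imported
      code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
      code.append("import " + valName + accessPath + ".`" + imv + "`;\n")
    }
  }
  print(code)
  // Prints roughly:
  //   val $VAL1 = $line3.$read.INSTANCE;
  //   import $VAL1.$iw.$iw.`x`;
  //   val $VAL2 = $line3.$read.INSTANCE;
  //   import $VAL2.$iw.$iw.`Sum`;
}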
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index edd5243798..95e1793832 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -225,18 +225,6 @@ class ReplSuite extends FunSuite {
assertContains("res4: Array[Int] = Array(0, 0, 0, 0, 0)", output)
}
- test("SPARK-1199-simple-reproduce") {
- val output = runInterpreter("local-cluster[1,1,512]",
- """
- |case class Sum(exp: String, exp2: String)
- |val a = Sum("A", "B")
- |def b(a: Sum): String = a match { case Sum(_, _) => "Found Sum" }
- |b(a)
- """.stripMargin)
- assertDoesNotContain("error:", output)
- assertDoesNotContain("Exception", output)
- }
-
if (System.getenv("MESOS_NATIVE_LIBRARY") != null) {
test("running on Mesos") {
val output = runInterpreter("localquiet",