aboutsummaryrefslogtreecommitdiff
path: root/repl
diff options
context:
space:
mode:
authorPrashant Sharma <prashant.s@imaginea.com>2014-07-04 00:05:27 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-07-04 00:05:27 -0700
commitd43415075b3468fe8aa56de5d2907d409bb96347 (patch)
tree5311c5f32d2035f7eeaab88e1bbfc2d57dafe8b0 /repl
parent544880457de556d1ad52e8cb7e1eca19da95f517 (diff)
downloadspark-d43415075b3468fe8aa56de5d2907d409bb96347.tar.gz
spark-d43415075b3468fe8aa56de5d2907d409bb96347.tar.bz2
spark-d43415075b3468fe8aa56de5d2907d409bb96347.zip
[SPARK-1199][REPL] Remove VALId and use the original import style for defined classes.
This is an alternate solution to #1176. Author: Prashant Sharma <prashant.s@imaginea.com> Closes #1179 from ScrapCodes/SPARK-1199/repl-fix-second-approach and squashes the following commits: 820b34b [Prashant Sharma] Here we generate two kinds of import wrappers based on whether it is a class or not.
Diffstat (limited to 'repl')
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala7
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/SparkImports.scala23
-rw-r--r--repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala12
3 files changed, 31 insertions, 11 deletions
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
index 7c83fa9d4d..3842c291d0 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
@@ -744,7 +744,7 @@ import org.apache.spark.util.Utils
*
* Read! Eval! Print! Some of that not yet centralized here.
*/
- class ReadEvalPrint(lineId: Int) {
+ class ReadEvalPrint(val lineId: Int) {
def this() = this(freshLineId())
private var lastRun: Run = _
@@ -1241,7 +1241,10 @@ import org.apache.spark.util.Utils
// old style
beSilentDuring(parse(code)) foreach { ts =>
ts foreach { t =>
- withoutUnwrapping(logDebug(asCompactString(t)))
+ if (isShow || isShowRaw)
+ withoutUnwrapping(echo(asCompactString(t)))
+ else
+ withoutUnwrapping(logDebug(asCompactString(t)))
}
}
}
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
index 419796b68b..bce5c74b9d 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
@@ -182,15 +182,26 @@ trait SparkImports {
// ambiguity errors will not be generated. Also, quote
// the name of the variable, so that we don't need to
// handle quoting keywords separately.
+ case x: ClassHandler =>
+ // I am trying to guess if the import is a defined class
+ // This is an ugly hack, I am not 100% sure of the consequences.
+ // Here, we let everything but "defined classes" use the import with val.
+ // The reason for this is that, otherwise, the remote executor tries to pull in
+ // the classes involved and may fail.
+ for (imv <- x.definedNames) {
+ val objName = req.lineRep.readPath
+ code.append("import " + objName + ".INSTANCE" + req.accessPath + ".`" + imv + "`\n")
+ }
+
case x =>
for (imv <- x.definedNames) {
if (currentImps contains imv) addWrapper()
val objName = req.lineRep.readPath
- val valName = "$VAL" + newValId();
+ val valName = "$VAL" + req.lineRep.lineId
if(!code.toString.endsWith(".`" + imv + "`;\n")) { // Which means already imported
- code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
- code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
+ code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
+ code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
}
// code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
// code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
@@ -211,10 +222,4 @@ trait SparkImports {
private def membersAtPickler(sym: Symbol): List[Symbol] =
beforePickler(sym.info.nonPrivateMembers.toList)
- private var curValId = 0
-
- private def newValId(): Int = {
- curValId += 1
- curValId
- }
}
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index f4ba8d9cc0..f2aa42dbcb 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -235,6 +235,18 @@ class ReplSuite extends FunSuite {
assertContains("res4: Array[Int] = Array(0, 0, 0, 0, 0)", output)
}
+ test("SPARK-1199-simple-reproduce") {
+ val output = runInterpreter("local-cluster[1,1,512]",
+ """
+ |case class Sum(exp: String, exp2: String)
+ |val a = Sum("A", "B")
+ |def b(a: Sum): String = a match { case Sum(_, _) => "Found Sum" }
+ |b(a)
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ }
+
if (System.getenv("MESOS_NATIVE_LIBRARY") != null) {
test("running on Mesos") {
val output = runInterpreter("localquiet",