diff options
author | Iulian Dragos <jaguarul@gmail.com> | 2015-07-10 16:22:49 +0100 |
---|---|---|
committer | Sean Owen <sowen@cloudera.com> | 2015-07-10 16:22:49 +0100 |
commit | 11e22b74a080ea58fb9410b5cc6fa4c03f9198f2 (patch) | |
tree | ed78465ebd47b2de8e666f9487f1e727b02f286f /repl/scala-2.11/src/test | |
parent | e14b545d2dcbc4587688b4c46718d3680b0a2f67 (diff) | |
download | spark-11e22b74a080ea58fb9410b5cc6fa4c03f9198f2.tar.gz spark-11e22b74a080ea58fb9410b5cc6fa4c03f9198f2.tar.bz2 spark-11e22b74a080ea58fb9410b5cc6fa4c03f9198f2.zip |
[SPARK-7944] [SPARK-8013] Remove most of the Spark REPL fork for Scala 2.11
This PR removes most of the code in the Spark REPL for Scala 2.11 and leaves just a couple of overridden methods in `SparkILoop` in order to:
- change welcome message
- restrict available commands (like `:power`)
- initialize Spark context
The two codebases have diverged and it's extremely hard to backport fixes from the upstream REPL. This somewhat radical step is absolutely necessary in order to fix other REPL tickets (like SPARK-8013 - Hive Thrift server for 2.11). BTW, the Scala REPL has fixed the serialization-unfriendly wrappers thanks to ScrapCodes's work in [#4522](https://github.com/scala/scala/pull/4522)
All tests pass and I tried the `spark-shell` on our Mesos cluster with some simple jobs (including with additional jars), everything looked good.
As soon as Scala 2.11.7 is out we need to upgrade and get a shaded `jline` dependency, clearing the way for SPARK-8013.
/cc pwendell
Author: Iulian Dragos <jaguarul@gmail.com>
Closes #6903 from dragos/issue/no-spark-repl-fork and squashes the following commits:
c596c6f [Iulian Dragos] Merge branch 'master' into issue/no-spark-repl-fork
2b1a305 [Iulian Dragos] Removed spaces around multiple imports.
0ce67a6 [Iulian Dragos] Remove -verbose flag for java compiler (added by mistake in an earlier commit).
10edaf9 [Iulian Dragos] Keep the jline dependency only in the 2.10 build.
529293b [Iulian Dragos] Add back Spark REPL files to rat-excludes, since they are part of the 2.10 repl.
d85370d [Iulian Dragos] Remove jline dependency from the Spark REPL.
b541930 [Iulian Dragos] Merge branch 'master' into issue/no-spark-repl-fork
2b15962 [Iulian Dragos] Change jline dependency and bump Scala version.
b300183 [Iulian Dragos] Rename package and add license on top of the file, remove files from rat-excludes and removed `-Yrepl-sync` per reviewer’s request.
9d46d85 [Iulian Dragos] Fix SPARK-7944.
abcc7cb [Iulian Dragos] Remove the REPL forked code.
Diffstat (limited to 'repl/scala-2.11/src/test')
-rw-r--r-- | repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala | 11 |
1 file changed, 2 insertions, 9 deletions
diff --git a/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 9ecc7c229e..e1cee97de3 100644
--- a/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -22,14 +22,11 @@ import java.net.URLClassLoader
 
 import scala.collection.mutable.ArrayBuffer
 import scala.concurrent.duration._
-import scala.tools.nsc.interpreter.SparkILoop
 
 import org.apache.commons.lang3.StringEscapeUtils
 
 import org.apache.spark.{SparkContext, SparkFunSuite}
 import org.apache.spark.util.Utils
-
-
 class ReplSuite extends SparkFunSuite {
 
   def runInterpreter(master: String, input: String): String = {
@@ -87,10 +84,6 @@ class ReplSuite extends SparkFunSuite {
         settings = new scala.tools.nsc.Settings
         settings.usejavacp.value = true
         org.apache.spark.repl.Main.interp = this
-        override def createInterpreter() {
-          intp = new SparkILoopInterpreter
-          intp.setContextClassLoader()
-        }
       }
 
     val out = new StringWriter()
@@ -274,7 +267,7 @@ class ReplSuite extends SparkFunSuite {
 
   test("SPARK-2632 importing a method from non serializable class and not using it.") {
     val output = runInterpreter("local",
-      """
+      """
       |class TestClass() { def testMethod = 3 }
       |val t = new TestClass
       |import t.testMethod
@@ -319,7 +312,7 @@ class ReplSuite extends SparkFunSuite {
     assertDoesNotContain("Exception", output)
     assertContains("ret: Array[Foo] = Array(Foo(1),", output)
   }
-  
+
   test("collecting objects of class defined in repl - shuffling") {
     val output = runInterpreter("local-cluster[1,1,512]",
       """