From 149910111331133d52e0cb01b256f7f731b436ad Mon Sep 17 00:00:00 2001
From: Prashant Sharma
Date: Thu, 31 Jul 2014 22:57:13 -0700
Subject: SPARK-2632, SPARK-2576. Fixed by only importing what is necessary
 during class definition.

Without this patch, the REPL imports everything available in scope into the
wrapper class it generates for each line:

```scala
scala> val a = 10l
val a = 10l
a: Long = 10

scala> import a._
import a._
import a._

scala> case class A(a: Int) // show
case class A(a: Int) // show
class $read extends Serializable {
  def <init>() = { super.<init>; () };
  class $iwC extends Serializable {
    def <init>() = { super.<init>; () };
    class $iwC extends Serializable {
      def <init>() = { super.<init>; () };
      import org.apache.spark.SparkContext._;
      class $iwC extends Serializable {
        def <init>() = { super.<init>; () };
        val $VAL5 = $line5.$read.INSTANCE;
        import $VAL5.$iw.$iw.$iw.$iw.a;
        class $iwC extends Serializable {
          def <init>() = { super.<init>; () };
          import a._;
          class $iwC extends Serializable {
            def <init>() = { super.<init>; () };
            class $iwC extends Serializable {
              def <init>() = { super.<init>; () };
              case class A extends scala.Product with scala.Serializable {
                val a: Int = _;
                def <init>(a: Int) = { super.<init>; () }
              }
            };
            val $iw = new $iwC.<init>
          };
          val $iw = new $iwC.<init>
        };
        val $iw = new $iwC.<init>
      };
      val $iw = new $iwC.<init>
    };
    val $iw = new $iwC.<init>
  };
  val $iw = new $iwC.<init>
}
object $read extends scala.AnyRef {
  def <init>() = { super.<init>; () };
  val INSTANCE = new $read.<init>
}
defined class A
```

With this patch, only the necessary imports are generated:

```scala
scala> val a = 10l
val a = 10l
a: Long = 10

scala> import a._
import a._
import a._

scala> case class A(a: Int) // show
case class A(a: Int) // show
class $read extends Serializable {
  def <init>() = { super.<init>; () };
  class $iwC extends Serializable {
    def <init>() = { super.<init>; () };
    class $iwC extends Serializable {
      def <init>() = { super.<init>; () };
      case class A extends scala.Product with scala.Serializable {
        val a: Int = _;
        def <init>(a: Int) = { super.<init>; () }
      }
    };
    val $iw = new $iwC.<init>
  };
  val $iw = new $iwC.<init>
}
object $read extends scala.AnyRef {
  def <init>() = { super.<init>; () };
  val INSTANCE = new $read.<init>
}
defined class A

scala>
```

This patch also adds a `:fallback` mode; when enabled, it restores the
spark-shell's 1.0.0 behaviour.

Author: Prashant Sharma
Author: Yin Huai
Author: Prashant Sharma

Closes #1635 from ScrapCodes/repl-fix-necessary-imports and squashes the
following commits:

b1968d2 [Prashant Sharma] Added toschemaRDD to test case.
0b712bb [Yin Huai] Add a REPL test to test importing a method.
02ad8ff [Yin Huai] Add a REPL test for importing SQLContext.createSchemaRDD.
ed6d0c7 [Prashant Sharma] Added a fallback mode, in case users run into issues while using repl.
b63d3b2 [Prashant Sharma] SPARK-2632, SPARK-2576. Fixed by only importing what is necessary during class definition.
---
 repl/pom.xml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/repl/pom.xml b/repl/pom.xml
index 4ebb1b82f0..68f4504450 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -55,6 +55,12 @@
       <version>${project.version}</version>
       <scope>runtime</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-sql_${scala.binary.version}</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-server</artifactId>
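For context, the idea behind the fix can be sketched in a few lines: track
which names earlier REPL lines defined, and emit imports into the generated
wrapper only for the names the new snippet actually references. The sketch
below is a hypothetical, simplified illustration of that idea, not the actual
SparkImports code; the `defined` map, the crude whitespace tokenizer, and all
paths in it are made up for the example.

```scala
// Minimal sketch of "selective" REPL imports (hypothetical, not SparkImports).
object SelectiveImports {
  // Hypothetical history: name -> fully qualified path of an earlier definition.
  val defined = Map(
    "a"  -> "$line3.$read.INSTANCE.$iw.a",
    "sc" -> "org.apache.spark.repl.Main.interp.sc"
  )

  // Very rough "is this name used?" check: split the snippet on non-word
  // characters and look for the identifier. Real REPLs use the parsed tree.
  def referenced(snippet: String, name: String): Boolean =
    snippet.split("\\W+").contains(name)

  // Emit an import only for previously defined names the snippet references,
  // instead of re-importing everything in scope.
  def importsFor(snippet: String): Seq[String] =
    defined.collect {
      case (name, path) if referenced(snippet, name) => s"import $path"
    }.toSeq

  def main(args: Array[String]): Unit =
    // Only `a` is referenced, so only `a` would be imported into the wrapper.
    importsFor("case class A(x: Int) { def y = a }").foreach(println)
}
```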