| field | value | date |
|---|---|---|
| author | Prashant Sharma <scrapcodes@gmail.com> | 2014-07-31 22:57:13 -0700 |
| committer | Michael Armbrust <michael@databricks.com> | 2014-07-31 22:57:13 -0700 |
| commit | 149910111331133d52e0cb01b256f7f731b436ad | |
| tree | eb73be80f58993e713cd82ded47e297b1d186a52 /repl/src/test/scala/org | |
| parent | 2cdc3e5c6f5601086590a0cebf40a48f7560d02e | |
SPARK-2632, SPARK-2576. Fixed by only importing what is necessary during class definition.
Without this patch, the REPL imports everything available in scope when it wraps a new class definition:
```scala
scala> val a = 10l
val a = 10l
a: Long = 10
scala> import a._
import a._
import a._
scala> case class A(a: Int) // show
case class A(a: Int) // show
class $read extends Serializable {
  def <init>() = {
    super.<init>;
    ()
  };
  class $iwC extends Serializable {
    def <init>() = {
      super.<init>;
      ()
    };
    class $iwC extends Serializable {
      def <init>() = {
        super.<init>;
        ()
      };
      import org.apache.spark.SparkContext._;
      class $iwC extends Serializable {
        def <init>() = {
          super.<init>;
          ()
        };
        val $VAL5 = $line5.$read.INSTANCE;
        import $VAL5.$iw.$iw.$iw.$iw.a;
        class $iwC extends Serializable {
          def <init>() = {
            super.<init>;
            ()
          };
          import a._;
          class $iwC extends Serializable {
            def <init>() = {
              super.<init>;
              ()
            };
            class $iwC extends Serializable {
              def <init>() = {
                super.<init>;
                ()
              };
              case class A extends scala.Product with scala.Serializable {
                <caseaccessor> <paramaccessor> val a: Int = _;
                def <init>(a: Int) = {
                  super.<init>;
                  ()
                }
              }
            };
            val $iw = new $iwC.<init>
          };
          val $iw = new $iwC.<init>
        };
        val $iw = new $iwC.<init>
      };
      val $iw = new $iwC.<init>
    };
    val $iw = new $iwC.<init>
  };
  val $iw = new $iwC.<init>
}
object $read extends scala.AnyRef {
  def <init>() = {
    super.<init>;
    ()
  };
  val INSTANCE = new $read.<init>
}
defined class A
```
With this patch, only the imports that are actually needed are generated:
```scala
scala> val a = 10l
val a = 10l
a: Long = 10
scala> import a._
import a._
import a._
scala> case class A(a: Int) // show
case class A(a: Int) // show
class $read extends Serializable {
  def <init>() = {
    super.<init>;
    ()
  };
  class $iwC extends Serializable {
    def <init>() = {
      super.<init>;
      ()
    };
    class $iwC extends Serializable {
      def <init>() = {
        super.<init>;
        ()
      };
      case class A extends scala.Product with scala.Serializable {
        <caseaccessor> <paramaccessor> val a: Int = _;
        def <init>(a: Int) = {
          super.<init>;
          ()
        }
      }
    };
    val $iw = new $iwC.<init>
  };
  val $iw = new $iwC.<init>
}
object $read extends scala.AnyRef {
  def <init>() = {
    super.<init>;
    ()
  };
  val INSTANCE = new $read.<init>
}
defined class A
scala>
```
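The mechanics behind the fix, for context: every name the wrapper re-imports keeps a reference to the object it came from, so a class defined later can drag a non-serializable value across the wire even though it never uses it (the SPARK-2632 scenario). The sketch below is not part of the patch; it is a minimal standalone reproduction with hypothetical names (`OldStyleWrapper`, `NewStyleWrapper`) standing in for the generated `$iwC` chain:
```scala
import java.io.{ByteArrayOutputStream, NotSerializableException, ObjectOutputStream}

// Deliberately NOT Serializable, like TestClass in the SPARK-2632 report.
class TestClass { def testMethod = 3 }

// Old wrapping (sketch): the REPL re-imported everything in scope, so the
// wrapper holds on to `t` even though the case class never touches it.
class OldStyleWrapper extends Serializable {
  val t = new TestClass // non-serializable field
  case class A(a: Int)  // inner class: instances keep an $outer reference
}

// New wrapping (sketch): nothing unnecessary is retained.
class NewStyleWrapper extends Serializable {
  case class A(a: Int)
}

object CaptureDemo {
  private def roundTrip(obj: AnyRef): Unit = {
    val out = new ObjectOutputStream(new ByteArrayOutputStream())
    out.writeObject(obj) // throws if anything reachable is not serializable
    out.close()
  }

  def main(args: Array[String]): Unit = {
    val fixed = new NewStyleWrapper
    val broken = new OldStyleWrapper
    roundTrip(fixed.A(1)) // succeeds: the $outer chain is clean
    try roundTrip(broken.A(1))
    catch {
      case e: NotSerializableException =>
        println(s"old-style wrapping fails: $e") // names TestClass
    }
  }
}
```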
This patch also adds a `:fallback` mode; when enabled, it restores the spark-shell's 1.0.0 behaviour.
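A usage sketch for the new command (the command name comes from this patch; the REPL's exact acknowledgement text is not shown in this diff, so the transcript below only annotates the effect in comments):
```scala
scala> :fallback
// subsequent definitions are wrapped the 1.0.0 way again, i.e. the
// generated $iwC chain re-imports everything in scope, as in the
// first listing above

scala> case class A(a: Int) // show
// prints the fully nested wrapper with `import a._` et al.
```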
Author: Prashant Sharma <scrapcodes@gmail.com>
Author: Yin Huai <huai@cse.ohio-state.edu>
Author: Prashant Sharma <prashant.s@imaginea.com>
Closes #1635 from ScrapCodes/repl-fix-necessary-imports and squashes the following commits:
b1968d2 [Prashant Sharma] Added toSchemaRDD to test case.
0b712bb [Yin Huai] Add a REPL test to test importing a method.
02ad8ff [Yin Huai] Add a REPL test for importing SQLContext.createSchemaRDD.
ed6d0c7 [Prashant Sharma] Added a fallback mode, in case users run into issues while using the REPL.
b63d3b2 [Prashant Sharma] SPARK-2632, SPARK-2576. Fixed by only importing what is necessary during class definition.
Diffstat (limited to 'repl/src/test/scala/org')
| -rw-r--r-- | repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala | 27 |
1 file changed, 27 insertions, 0 deletions
```diff
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index e2d8d5ff38..c8763eb277 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -256,6 +256,33 @@ class ReplSuite extends FunSuite {
     assertDoesNotContain("error:", output)
     assertDoesNotContain("Exception", output)
   }
+
+  test("SPARK-2576 importing SQLContext.createSchemaRDD.") {
+    // We need to use local-cluster to test this case.
+    val output = runInterpreter("local-cluster[1,1,512]",
+      """
+        |val sqlContext = new org.apache.spark.sql.SQLContext(sc)
+        |import sqlContext.createSchemaRDD
+        |case class TestCaseClass(value: Int)
+        |sc.parallelize(1 to 10).map(x => TestCaseClass(x)).toSchemaRDD.collect
+      """.stripMargin)
+    assertDoesNotContain("error:", output)
+    assertDoesNotContain("Exception", output)
+  }
+
+  test("SPARK-2632 importing a method from non serializable class and not using it.") {
+    val output = runInterpreter("local",
+      """
+        |class TestClass() { def testMethod = 3 }
+        |val t = new TestClass
+        |import t.testMethod
+        |case class TestCaseClass(value: Int)
+        |sc.parallelize(1 to 10).map(x => TestCaseClass(x)).collect
+      """.stripMargin)
+    assertDoesNotContain("error:", output)
+    assertDoesNotContain("Exception", output)
+  }
+
   if (System.getenv("MESOS_NATIVE_LIBRARY") != null) {
     test("running on Mesos") {
       val output = runInterpreter("localquiet",
```
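A note on the master URLs the new tests use: `local` runs driver and executor in one JVM, while `local-cluster[1,1,512]` spins up a pseudo-cluster with separate executor JVMs; the bracketed values are workers, cores per worker, and memory per worker in MB. The SPARK-2576 test needs the latter because the REPL-generated classes must actually be serialized and shipped to another JVM. A standalone sketch of the same setup (app name and driver code are assumptions, not from the patch; running `local-cluster` outside Spark's own test harness generally requires a built Spark distribution on the classpath):
```scala
import org.apache.spark.{SparkConf, SparkContext}

object LocalClusterSketch {
  // Hypothetical driver mirroring the new test: 1 worker, 1 core, 512 MB.
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("local-cluster-sketch")
      .setMaster("local-cluster[1,1,512]") // forces real cross-JVM serialization
    val sc = new SparkContext(conf)
    try {
      case class TestCaseClass(value: Int)
      val collected = sc.parallelize(1 to 10).map(x => TestCaseClass(x)).collect()
      println(collected.mkString(", "))
    } finally {
      sc.stop()
    }
  }
}
```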