path: root/repl
author     Josh Rosen <joshrosen@databricks.com>    2016-01-30 00:20:28 -0800
committer  Reynold Xin <rxin@databricks.com>        2016-01-30 00:20:28 -0800
commit     289373b28cd2546165187de2e6a9185a1257b1e7 (patch)
tree       b541a6e52a4ff20604689efafbfa0df7ad0901f5 /repl
parent     dab246f7e4664d36073ec49d9df8a11c5e998cdb (diff)
[SPARK-6363][BUILD] Make Scala 2.11 the default Scala version
This patch changes Spark's build to make Scala 2.11 the default Scala version. To be clear, this does not mean that Spark will stop supporting Scala 2.10: users will still be able to compile Spark for Scala 2.10 by following the instructions on the "Building Spark" page; however, it does mean that Scala 2.11 will be the default Scala version used by our CI builds (including pull request builds).

The Scala 2.11 compiler is faster than 2.10, so I think we'll be able to look forward to a slight speedup in our CI builds (it looks like it's about 2x faster for the Maven compile-only builds, for instance).

After this patch is merged, I'll update Jenkins to add new compile-only jobs to ensure that Scala 2.10 compilation doesn't break.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #10608 from JoshRosen/SPARK-6363.
Diffstat (limited to 'repl')
-rw-r--r--  repl/pom.xml                                                            8
-rw-r--r--  repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala         9
-rw-r--r--  repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala    7
3 files changed, 13 insertions(+), 11 deletions(-)
diff --git a/repl/pom.xml b/repl/pom.xml
index efc3dd452e..0f396c9b80 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -20,13 +20,13 @@
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-parent_2.10</artifactId>
+ <artifactId>spark-parent_2.11</artifactId>
<version>2.0.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<groupId>org.apache.spark</groupId>
- <artifactId>spark-repl_2.10</artifactId>
+ <artifactId>spark-repl_2.11</artifactId>
<packaging>jar</packaging>
<name>Spark Project REPL</name>
<url>http://spark.apache.org/</url>
@@ -159,7 +159,7 @@
<profile>
<id>scala-2.10</id>
<activation>
- <property><name>!scala-2.11</name></property>
+ <property><name>scala-2.10</name></property>
</activation>
<dependencies>
<dependency>
@@ -173,7 +173,7 @@
<profile>
<id>scala-2.11</id>
<activation>
- <property><name>scala-2.11</name></property>
+ <property><name>!scala-2.10</name></property>
</activation>
<properties>
<extra.source.dir>scala-2.11/src/main/scala</extra.source.dir>
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index bb3081d129..07ba28bb07 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -33,7 +33,8 @@ object Main extends Logging {
var sparkContext: SparkContext = _
var sqlContext: SQLContext = _
- var interp = new SparkILoop // this is a public var because tests reset it.
+ // this is a public var because tests reset it.
+ var interp: SparkILoop = _
private var hasErrors = false
@@ -43,6 +44,12 @@ object Main extends Logging {
}
def main(args: Array[String]) {
+ doMain(args, new SparkILoop)
+ }
+
+ // Visible for testing
+ private[repl] def doMain(args: Array[String], _interp: SparkILoop): Unit = {
+ interp = _interp
val interpArguments = List(
"-Yrepl-class-based",
"-Yrepl-outdir", s"${outputDir.getAbsolutePath}",
diff --git a/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 63f3688c9e..b9ed79da42 100644
--- a/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -50,12 +50,7 @@ class ReplSuite extends SparkFunSuite {
System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
System.setProperty("spark.master", master)
- val interp = {
- new SparkILoop(in, new PrintWriter(out))
- }
- org.apache.spark.repl.Main.interp = interp
- Main.main(Array("-classpath", classpath)) // call main
- org.apache.spark.repl.Main.interp = null
+ Main.doMain(Array("-classpath", classpath), new SparkILoop(in, new PrintWriter(out)))
if (oldExecutorClasspath != null) {
System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)
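With doMain taking the interpreter as a parameter, the suite above can wire a SparkILoop to in-memory streams and drive the REPL in a single call, as the replacement line shows. A hedged sketch of such a helper, modelled on this suite's runInterpreter setup; the object and method names are illustrative, and it has to live in the org.apache.spark.repl package because doMain is private[repl]:

package org.apache.spark.repl   // required: Main.doMain is private[repl]

import java.io.{BufferedReader, PrintWriter, StringReader, StringWriter}

// Hypothetical helper (not part of the patch) showing how a test drives the
// REPL through the injected interpreter and captures its output.
object ReplTestHelper {
  def runWithInjectedInterp(input: String, classpath: String): String = {
    val in  = new BufferedReader(new StringReader(input + "\n"))
    val out = new StringWriter()
    Main.doMain(Array("-classpath", classpath), new SparkILoop(in, new PrintWriter(out)))
    out.toString
  }
}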