author     Matei Zaharia <matei@eecs.berkeley.edu>  2013-08-31 19:27:07 -0700
committer  Matei Zaharia <matei@eecs.berkeley.edu>  2013-09-01 14:13:13 -0700
commit     46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef (patch)
tree       4a46971b36680bc5ef51be81ada8eb47670f6b22 /repl
parent     a30fac16ca0525f2001b127e5f9518c9680844c9 (diff)
Initial work to rename package to org.apache.spark
Diffstat (limited to 'repl')
-rw-r--r--  repl/pom.xml | 12
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala (renamed from repl/src/main/scala/spark/repl/ExecutorClassLoader.scala) | 2
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/Main.scala (renamed from repl/src/main/scala/spark/repl/Main.scala) | 2
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkHelper.scala (renamed from repl/src/main/scala/spark/repl/SparkHelper.scala) | 0
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala (renamed from repl/src/main/scala/spark/repl/SparkILoop.scala) | 18
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala (renamed from repl/src/main/scala/spark/repl/SparkIMain.scala) | 10
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkISettings.scala (renamed from repl/src/main/scala/spark/repl/SparkISettings.scala) | 2
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkImports.scala (renamed from repl/src/main/scala/spark/repl/SparkImports.scala) | 2
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala (renamed from repl/src/main/scala/spark/repl/SparkJLineCompletion.scala) | 2
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala (renamed from repl/src/main/scala/spark/repl/SparkJLineReader.scala) | 2
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala (renamed from repl/src/main/scala/spark/repl/SparkMemberHandlers.scala) | 2
-rw-r--r--  repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala (renamed from repl/src/test/scala/spark/repl/ReplSuite.scala) | 6
12 files changed, 30 insertions, 30 deletions
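
The rename touches only package declarations, imports, and Maven coordinates; class names and behavior are unchanged. As a minimal sketch (hypothetical downstream driver code, not part of this patch), the old top-level "spark" package simply gains the "org.apache" prefix:

// Before this patch a driver would write:
//   import spark.SparkContext
//   import spark.SparkContext._
// After this patch the same program becomes:
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._    // pair-RDD implicits, same as before

object RenameSketch {
  def main(args: Array[String]) {
    val sc = new SparkContext("local", "rename-sketch")             // constructor unchanged
    val counts = sc.parallelize(Seq(("a", 1), ("a", 2), ("b", 1)))
                   .reduceByKey(_ + _)                               // provided by SparkContext._
    println(counts.collect().mkString(", "))
    sc.stop()
  }
}
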
diff --git a/repl/pom.xml b/repl/pom.xml
index f800664cff..f6276f1895 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -19,13 +19,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-repl</artifactId>
<packaging>jar</packaging>
<name>Spark Project REPL</name>
@@ -38,18 +38,18 @@
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-bagel</artifactId>
<version>${project.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-mllib</artifactId>
<version>${project.version}</version>
<scope>runtime</scope>
@@ -136,7 +136,7 @@
<id>hadoop2-yarn</id>
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-yarn</artifactId>
<version>${project.version}</version>
</dependency>
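
Downstream builds that pull these modules by Maven coordinates pick up the same change: the groupId moves from org.spark-project to org.apache.spark while the artifactIds and version stay as declared in this pom. A minimal sbt sketch under that assumption (the exact artifact name and version under which the renamed modules are eventually published may differ):

// build.sbt sketch: only the organization/groupId changes
// old: libraryDependencies += "org.spark-project" % "spark-core" % "0.8.0-SNAPSHOT"
libraryDependencies += "org.apache.spark" % "spark-core" % "0.8.0-SNAPSHOT"
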
diff --git a/repl/src/main/scala/spark/repl/ExecutorClassLoader.scala b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
index 274bc585db..3e171849e3 100644
--- a/repl/src/main/scala/spark/repl/ExecutorClassLoader.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.repl
+package org.apache.spark.repl
import java.io.{ByteArrayOutputStream, InputStream}
import java.net.{URI, URL, URLClassLoader, URLEncoder}
diff --git a/repl/src/main/scala/spark/repl/Main.scala b/repl/src/main/scala/org/apache/spark/repl/Main.scala
index d824d62fd1..17e149f8ab 100644
--- a/repl/src/main/scala/spark/repl/Main.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/Main.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.repl
+package org.apache.spark.repl
import scala.collection.mutable.Set
diff --git a/repl/src/main/scala/spark/repl/SparkHelper.scala b/repl/src/main/scala/org/apache/spark/repl/SparkHelper.scala
index d8fb7191b4..d8fb7191b4 100644
--- a/repl/src/main/scala/spark/repl/SparkHelper.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkHelper.scala
diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 0cecbd71ad..193ccb48ee 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -3,7 +3,7 @@
* @author Alexander Spoon
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
@@ -22,8 +22,8 @@ import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
import interpreter._
import io.{ File, Sources }
-import spark.Logging
-import spark.SparkContext
+import org.apache.spark.Logging
+import org.apache.spark.SparkContext
/** The Scala interactive shell. It provides a read-eval-print loop
* around the Interpreter class.
@@ -816,13 +816,13 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
def initializeSpark() {
intp.beQuietDuring {
command("""
- spark.repl.Main.interp.out.println("Creating SparkContext...");
- spark.repl.Main.interp.out.flush();
- @transient val sc = spark.repl.Main.interp.createSparkContext();
- spark.repl.Main.interp.out.println("Spark context available as sc.");
- spark.repl.Main.interp.out.flush();
+ org.apache.spark.repl.Main.interp.out.println("Creating SparkContext...");
+ org.apache.spark.repl.Main.interp.out.flush();
+ @transient val sc = org.apache.spark.repl.Main.interp.createSparkContext();
+ org.apache.spark.repl.Main.interp.out.println("Spark context available as sc.");
+ org.apache.spark.repl.Main.interp.out.flush();
""")
- command("import spark.SparkContext._")
+ command("import org.apache.spark.SparkContext._")
}
echo("Type in expressions to have them evaluated.")
echo("Type :help for more information.")
diff --git a/repl/src/main/scala/spark/repl/SparkIMain.scala b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
index 43b6a6c950..7e244e48a2 100644
--- a/repl/src/main/scala/spark/repl/SparkIMain.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
@@ -3,7 +3,7 @@
* @author Martin Odersky
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
@@ -27,9 +27,9 @@ import scala.util.control.Exception.{ ultimately }
import scala.reflect.NameTransformer
import SparkIMain._
-import spark.HttpServer
-import spark.Utils
-import spark.SparkEnv
+import org.apache.spark.HttpServer
+import org.apache.spark.Utils
+import org.apache.spark.SparkEnv
/** An interpreter for Scala code.
*
@@ -883,7 +883,7 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
val execution = lineManager.set(originalLine) {
// MATEI: set the right SparkEnv for our SparkContext, because
// this execution will happen in a separate thread
- val sc = spark.repl.Main.interp.sparkContext
+ val sc = org.apache.spark.repl.Main.interp.sparkContext
if (sc != null && sc.env != null)
SparkEnv.set(sc.env)
// Execute the line
diff --git a/repl/src/main/scala/spark/repl/SparkISettings.scala b/repl/src/main/scala/org/apache/spark/repl/SparkISettings.scala
index 8ebb01d146..605b7b259b 100644
--- a/repl/src/main/scala/spark/repl/SparkISettings.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkISettings.scala
@@ -3,7 +3,7 @@
* @author Alexander Spoon
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkImports.scala b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
index 5caf5ca51a..41a1731d60 100644
--- a/repl/src/main/scala/spark/repl/SparkImports.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
@@ -3,7 +3,7 @@
* @author Paul Phillips
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala b/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala
index 0069d8b2f4..fdc172d753 100644
--- a/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala
@@ -3,7 +3,7 @@
* @author Paul Phillips
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkJLineReader.scala b/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala
index ef6b6e092e..d9e1de105c 100644
--- a/repl/src/main/scala/spark/repl/SparkJLineReader.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala
@@ -3,7 +3,7 @@
* @author Stepan Koltsov
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala b/repl/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
index 2980dfcd76..a3409bf665 100644
--- a/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
@@ -3,7 +3,7 @@
* @author Martin Odersky
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
diff --git a/repl/src/test/scala/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 80ae605558..8f9b632c0e 100644
--- a/repl/src/test/scala/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.repl
+package org.apache.spark.repl
import java.io._
import java.net.URLClassLoader
@@ -41,10 +41,10 @@ class ReplSuite extends FunSuite {
}
}
val interp = new SparkILoop(in, new PrintWriter(out), master)
- spark.repl.Main.interp = interp
+ org.apache.spark.repl.Main.interp = interp
val separator = System.getProperty("path.separator")
interp.process(Array("-classpath", paths.mkString(separator)))
- spark.repl.Main.interp = null
+ org.apache.spark.repl.Main.interp = null
if (interp.sparkContext != null) {
interp.sparkContext.stop()
}