author     Prashant Sharma <prashant.s@imaginea.com>   2013-04-22 14:13:56 +0530
committer  Prashant Sharma <prashant.s@imaginea.com>   2013-04-22 14:14:03 +0530
commit     185bb9525a3a48313cd5e446e1b80d2d697465d8 (patch)
tree       bdbab31c488d6140973ef99d93cc8ff959cad50f /repl
parent     4b57f83209b94f87890ef307af45fa493e7fdba8 (diff)
parent     17e076de800ea0d4c55f2bd657348641f6f9c55b (diff)
Manually merged scala-2.10 and master
Diffstat (limited to 'repl')
-rw-r--r--  repl/pom.xml                                      | 39
-rw-r--r--  repl/src/main/scala/spark/repl/SparkILoop.scala   | 17
-rw-r--r--  repl/src/test/resources/log4j.properties          |  4
3 files changed, 44 insertions, 16 deletions
diff --git a/repl/pom.xml b/repl/pom.xml
index 38e883c7f8..0b5e400c3d 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -3,8 +3,8 @@
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.spark-project</groupId>
- <artifactId>parent</artifactId>
- <version>0.7.0-SNAPSHOT</version>
+ <artifactId>spark-parent</artifactId>
+ <version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -72,11 +72,6 @@
<profiles>
<profile>
<id>hadoop1</id>
- <activation>
- <property>
- <name>!hadoopVersion</name>
- </property>
- </activation>
<properties>
<classifier>hadoop1</classifier>
</properties>
@@ -102,6 +97,13 @@
<scope>runtime</scope>
</dependency>
<dependency>
+ <groupId>org.spark-project</groupId>
+ <artifactId>spark-streaming</artifactId>
+ <version>${project.version}</version>
+ <classifier>hadoop1</classifier>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<scope>provided</scope>
@@ -121,12 +123,6 @@
</profile>
<profile>
<id>hadoop2</id>
- <activation>
- <property>
- <name>hadoopVersion</name>
- <value>2</value>
- </property>
- </activation>
<properties>
<classifier>hadoop2</classifier>
</properties>
@@ -152,6 +148,13 @@
<scope>runtime</scope>
</dependency>
<dependency>
+ <groupId>org.spark-project</groupId>
+ <artifactId>spark-streaming</artifactId>
+ <version>${project.version}</version>
+ <classifier>hadoop2</classifier>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<scope>provided</scope>
@@ -161,6 +164,16 @@
<artifactId>hadoop-client</artifactId>
<scope>provided</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.avro</groupId>
+ <artifactId>avro</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.avro</groupId>
+ <artifactId>avro-ipc</artifactId>
+ <scope>provided</scope>
+ </dependency>
</dependencies>
<build>
<plugins>
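
The pom.xml hunks above drop the automatic <activation> blocks (so a Hadoop profile presumably has to be selected explicitly, e.g. with Maven's -P flag) and add spark-streaming to the repl's runtime classpath for both profiles. Below is a minimal sketch of what that classpath addition enables, assuming the pre-Apache package layout visible elsewhere in this diff (spark.SparkContext, spark.streaming.*) and a StreamingContext constructor that wraps an existing SparkContext; the object name and exact signatures are assumptions, not taken from the patch.

// Sketch only, not part of this patch: with spark-streaming on the repl's
// runtime classpath, a StreamingContext can be built next to the SparkContext.
import spark.SparkContext
import spark.streaming.{Seconds, StreamingContext}

object StreamingFromReplSketch {
  def main(args: Array[String]): Unit = {
    val sc  = new SparkContext("local[2]", "repl-streaming-sketch")
    val ssc = new StreamingContext(sc, Seconds(1))  // 1-second batches (assumed constructor)
    // ... DStream definitions would go here ...
    ssc.stop()
    sc.stop()
  }
}
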
diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
index 5a1e54c929..e83e403760 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -152,7 +152,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
finally in = saved
}
-
+
def sparkCleanUp(){
echo("Stopping spark context.")
intp.beQuietDuring {
@@ -222,6 +222,21 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
}
}
+ /** Print a welcome message */
+ def printWelcome() {
+ echo("""Welcome to
+ ____ __
+ / __/__ ___ _____/ /__
+ _\ \/ _ \/ _ `/ __/ '_/
+ /___/ .__/\_,_/_/ /_/\_\ version 0.8.0
+ /_/
+""")
+ import Properties._
+ val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
+ versionString, javaVmName, javaVersion)
+ echo(welcomeMsg)
+ }
+
/** Show the history */
lazy val historyCommand = new LoopCommand("history", "show the history (optional num is commands to show)") {
override def usage = "[num]"
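
The SparkILoop hunk above adds a printWelcome banner that reports the Scala and JVM versions via a Properties object in scope. A standalone sketch of just that formatting line follows, using scala.util.Properties (which exposes the same versionString, javaVmName and javaVersion fields); the object name is illustrative only.

// Sketch, not part of the patch: reproduces the banner's "Using Scala ..." line.
import scala.util.Properties.{versionString, javaVmName, javaVersion}

object WelcomeLineSketch {
  def main(args: Array[String]): Unit = {
    println("Using Scala %s (%s, Java %s)".format(versionString, javaVmName, javaVersion))
  }
}
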
diff --git a/repl/src/test/resources/log4j.properties b/repl/src/test/resources/log4j.properties
index 4c99e450bc..cfb1a390e6 100644
--- a/repl/src/test/resources/log4j.properties
+++ b/repl/src/test/resources/log4j.properties
@@ -1,8 +1,8 @@
-# Set everything to be logged to the console
+# Set everything to be logged to the repl/target/unit-tests.log
log4j.rootCategory=INFO, file
log4j.appender.file=org.apache.log4j.FileAppender
log4j.appender.file.append=false
-log4j.appender.file.file=spark-tests.log
+log4j.appender.file.file=repl/target/unit-tests.log
log4j.appender.file.layout=org.apache.log4j.PatternLayout
log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %p %c{1}: %m%n
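
The log4j.properties hunk changes the test log file from spark-tests.log to repl/target/unit-tests.log and fixes the stale comment. A small hedged sketch of a logger call affected by this configuration, assuming log4j 1.x on the test classpath (as the org.apache.log4j appender classes above imply); the object name is made up.

// Illustrative only: with the properties above on the test classpath, this
// INFO message is written to repl/target/unit-tests.log rather than stdout.
import org.apache.log4j.Logger

object ReplLoggingSketch {
  def main(args: Array[String]): Unit = {
    val log = Logger.getLogger(getClass)
    log.info("repl test logging now lands in repl/target/unit-tests.log")
  }
}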