author    Prashant Sharma <prashant.s@imaginea.com>  2013-04-29 19:56:28 +0530
committer Prashant Sharma <prashant.s@imaginea.com>  2013-04-29 19:56:28 +0530
commit    24bbf318b3e8e657f911204b84f2d032fb1ff6e2 (patch)
tree      8f9f2233f20a9bbd860e456a8e83ab856a53e3b8
parent    d3518f57cd833f6297860c6344e685b9bf0210f5 (diff)
Fixed other warnings
-rw-r--r--  core/src/main/scala/spark/TaskState.scala                       3
-rw-r--r--  core/src/main/scala/spark/api/python/PythonRDD.scala            4
-rw-r--r--  core/src/main/scala/spark/deploy/ExecutorState.scala            3
-rw-r--r--  core/src/main/scala/spark/deploy/master/ApplicationState.scala  3
-rw-r--r--  core/src/main/scala/spark/deploy/master/WorkerState.scala       2
-rw-r--r--  core/src/main/scala/spark/util/AkkaUtils.scala                  4
-rw-r--r--  repl/src/main/scala/spark/repl/SparkIMain.scala                 2
-rw-r--r--  streaming/src/main/scala/spark/streaming/DStream.scala          4
8 files changed, 10 insertions(+), 15 deletions(-)
diff --git a/core/src/main/scala/spark/TaskState.scala b/core/src/main/scala/spark/TaskState.scala
index 78eb33a628..44893ef089 100644
--- a/core/src/main/scala/spark/TaskState.scala
+++ b/core/src/main/scala/spark/TaskState.scala
@@ -2,8 +2,7 @@ package spark
import org.apache.mesos.Protos.{TaskState => MesosTaskState}
-private[spark] object TaskState
- extends Enumeration("LAUNCHING", "RUNNING", "FINISHED", "FAILED", "KILLED", "LOST") {
+private[spark] object TaskState extends Enumeration {
val LAUNCHING, RUNNING, FINISHED, FAILED, KILLED, LOST = Value
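Note: Scala 2.10 dropped the deprecated Enumeration constructor that took the value names as string arguments; names are now derived reflectively from the val identifiers, so listing them again is redundant and warned about. The same fix is applied to ExecutorState, ApplicationState, and WorkerState below. A minimal standalone sketch of the 2.10-friendly pattern (illustrative, not part of the commit):

    object TaskState extends Enumeration {
      type TaskState = Value
      val LAUNCHING, RUNNING, FINISHED, FAILED, KILLED, LOST = Value
    }

    // The name is recovered reflectively from the val identifier:
    // TaskState.LAUNCHING.toString == "LAUNCHING"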
diff --git a/core/src/main/scala/spark/api/python/PythonRDD.scala b/core/src/main/scala/spark/api/python/PythonRDD.scala
index 82959a33eb..220047c360 100644
--- a/core/src/main/scala/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/spark/api/python/PythonRDD.scala
@@ -134,7 +134,7 @@ private[spark] class PythonRDD[T: ClassTag](
}
new Array[Byte](0)
}
- case e => throw e
+ case e : Throwable => throw e
}
}
@@ -233,7 +233,7 @@ private[spark] object PythonRDD {
}
} catch {
case eof: EOFException => {}
- case e => throw e
+ case e : Throwable => throw e
}
JavaRDD.fromRDD(sc.sc.parallelize(objs, parallelism))
}
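Note: in Scala 2.10 an untyped catch-all pattern in a catch block draws the warning "This catches all Throwables ... use case e : Throwable to clear this warning"; the explicit annotation states the intent and silences it. A minimal sketch of the pattern (drain and read are made-up names, not from this commit):

    import java.io.EOFException

    def drain(read: () => Int): Unit =
      try {
        while (true) read()
      } catch {
        case eof: EOFException =>      // expected: end of stream ends the read loop
        case e: Throwable => throw e   // explicit type clears the 2.10 warning
      }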
diff --git a/core/src/main/scala/spark/deploy/ExecutorState.scala b/core/src/main/scala/spark/deploy/ExecutorState.scala
index 5dc0c54552..a487e11f2c 100644
--- a/core/src/main/scala/spark/deploy/ExecutorState.scala
+++ b/core/src/main/scala/spark/deploy/ExecutorState.scala
@@ -1,7 +1,6 @@
package spark.deploy
-private[spark] object ExecutorState
- extends Enumeration("LAUNCHING", "LOADING", "RUNNING", "KILLED", "FAILED", "LOST") {
+private[spark] object ExecutorState extends Enumeration {
val LAUNCHING, LOADING, RUNNING, KILLED, FAILED, LOST = Value
diff --git a/core/src/main/scala/spark/deploy/master/ApplicationState.scala b/core/src/main/scala/spark/deploy/master/ApplicationState.scala
index 15016b388d..4f359711c5 100644
--- a/core/src/main/scala/spark/deploy/master/ApplicationState.scala
+++ b/core/src/main/scala/spark/deploy/master/ApplicationState.scala
@@ -1,7 +1,6 @@
package spark.deploy.master
-private[spark] object ApplicationState
- extends Enumeration("WAITING", "RUNNING", "FINISHED", "FAILED") {
+private[spark] object ApplicationState extends Enumeration {
type ApplicationState = Value
diff --git a/core/src/main/scala/spark/deploy/master/WorkerState.scala b/core/src/main/scala/spark/deploy/master/WorkerState.scala
index 0bf35014c8..1e347bee20 100644
--- a/core/src/main/scala/spark/deploy/master/WorkerState.scala
+++ b/core/src/main/scala/spark/deploy/master/WorkerState.scala
@@ -1,6 +1,6 @@
package spark.deploy.master
-private[spark] object WorkerState extends Enumeration("ALIVE", "DEAD", "DECOMMISSIONED") {
+private[spark] object WorkerState extends Enumeration {
type WorkerState = Value
val ALIVE, DEAD, DECOMMISSIONED = Value
diff --git a/core/src/main/scala/spark/util/AkkaUtils.scala b/core/src/main/scala/spark/util/AkkaUtils.scala
index 70338ec4dc..e16915c8e9 100644
--- a/core/src/main/scala/spark/util/AkkaUtils.scala
+++ b/core/src/main/scala/spark/util/AkkaUtils.scala
@@ -61,7 +61,7 @@ private[spark] object AkkaUtils {
* Creates a Spray HTTP server bound to a given IP and port with a given Spray Route object to
* handle requests. Returns the bound port or throws a SparkException on failure.
*/
- def startSprayServer(actorSystem: ActorSystem, ip: String, port: Int, route: Route) {
+ def startSprayServer(actorSystem: ActorSystem, ip: String, port: Int, route: Route) = {
val ioWorker = IOExtension(actorSystem).ioBridge()
val httpService = actorSystem.actorOf(Props(HttpServiceActor(route)))
val server = actorSystem.actorOf(
@@ -72,7 +72,7 @@ private[spark] object AkkaUtils {
try {
Await.result(future, timeout) match {
case bound: HttpServer.Bound =>
- return server
+ server
case other: Any =>
throw new SparkException("Failed to bind web UI to port " + port + ": " + other)
}
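Note: without "=", a Scala method uses procedure syntax and its result type is Unit, so "return server" discarded the value and drew a warning; adding "=" makes the method return its last expression, and the explicit return becomes unnecessary. A minimal sketch of the two styles (hypothetical names):

    // Procedure syntax (no '='): result type is Unit, so a returned
    // value is discarded and 2.10 warns about it.
    def bindUnit(port: Int) { return port }

    // Expression syntax: the method's last expression is its result.
    def bind(port: Int) = {
      val bound = port   // stand-in for the real bind-and-wait logic
      bound
    }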
diff --git a/repl/src/main/scala/spark/repl/SparkIMain.scala b/repl/src/main/scala/spark/repl/SparkIMain.scala
index 9894429ec1..b1977d6788 100644
--- a/repl/src/main/scala/spark/repl/SparkIMain.scala
+++ b/repl/src/main/scala/spark/repl/SparkIMain.scala
@@ -809,7 +809,7 @@ import spark.Logging
// val readRoot = getRequiredModule(readPath) // the outermost wrapper
// MATEI: Changed this to getClass because the root object is no longer a module (Scala singleton object)
- val readRoot = definitions.getClass(newTypeName(readPath)) // the outermost wrapper
+ val readRoot = rootMirror.getClassByName(newTypeName(readPath)) // the outermost wrapper
(accessPath split '.').foldLeft(readRoot: Symbol) {
case (sym, "") => sym
case (sym, name) => afterTyper(termMember(sym, name))
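Note: Scala 2.10's reflection overhaul moved symbol lookup from definitions.getClass onto mirrors; rootMirror.getClassByName resolves a class symbol through the compiler's root mirror. A rough analogue using the public 2.10 runtime-reflection API (illustrative only, not the compiler-internal call above):

    import scala.reflect.runtime.{universe => ru}

    // Look up a class symbol by fully qualified name through a mirror.
    val mirror = ru.runtimeMirror(getClass.getClassLoader)
    val sym = mirror.staticClass("java.lang.String")
    // sym.fullName == "java.lang.String"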
diff --git a/streaming/src/main/scala/spark/streaming/DStream.scala b/streaming/src/main/scala/spark/streaming/DStream.scala
index c307c69611..6ad43dd9b5 100644
--- a/streaming/src/main/scala/spark/streaming/DStream.scala
+++ b/streaming/src/main/scala/spark/streaming/DStream.scala
@@ -466,9 +466,7 @@ abstract class DStream[T: ClassTag] (
* this DStream will be registered as an output stream and therefore materialized.
*/
def foreach(foreachFunc: (RDD[T], Time) => Unit) {
- val newStream = new ForEachDStream(this, context.sparkContext.clean(foreachFunc))
- ssc.registerOutputStream(newStream)
- newStream
+ ssc.registerOutputStream(new ForEachDStream(this, context.sparkContext.clean(foreachFunc)))
}
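Note: foreach is declared with procedure syntax, so its result type is Unit; the trailing newStream was a dead expression that 2.10 warned about, and inlining the construction into registerOutputStream removes it. A minimal sketch of the refactor (hypothetical names):

    // Before (sketch): the trailing value is dead in a Unit method.
    def registerBefore(sink: String => Unit): Unit = {
      val s = "stream"
      sink(s)
      s            // discarded: the method returns Unit
    }

    // After (sketch): construct and register inline.
    def registerAfter(sink: String => Unit): Unit =
      sink("stream")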
/**