author     Matei Zaharia <matei@eecs.berkeley.edu>   2013-03-26 15:05:13 -0700
committer  Matei Zaharia <matei@eecs.berkeley.edu>   2013-03-26 15:05:13 -0700
commit     bc36ee4fbbe3ad3b7e15fbdba53de42a29b81a9f (patch)
tree       40e825bea310e1240932873efebbdffc417a8152
parent     6c18131487e8537da19e2daf45b385c5af494ea3 (diff)
parent     1f5381119f8c8afd0ba69bc7773c10972dd43bc1 (diff)
Merge pull request #543 from holdenk/master
Re-enable deprecation warnings and fix deprecated warning.
-rw-r--r--  core/src/main/scala/spark/CacheManager.scala                                2
-rw-r--r--  project/SparkBuild.scala                                                    2
-rw-r--r--  streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala    2
3 files changed, 3 insertions, 3 deletions
diff --git a/core/src/main/scala/spark/CacheManager.scala b/core/src/main/scala/spark/CacheManager.scala
index c7b379a3fb..f7a2b7e802 100644
--- a/core/src/main/scala/spark/CacheManager.scala
+++ b/core/src/main/scala/spark/CacheManager.scala
@@ -27,7 +27,7 @@ private[spark] class CacheManager(blockManager: BlockManager) extends Logging {
if (loading.contains(key)) {
logInfo("Loading contains " + key + ", waiting...")
while (loading.contains(key)) {
- try {loading.wait()} catch {case _ =>}
+ try {loading.wait()} catch {case _ : Throwable =>}
}
logInfo("Loading no longer contains " + key + ", so returning cached result")
// See whether someone else has successfully loaded it. The main way this would fail
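Note: the one-line change above replaces a bare wildcard catch. A `case _ =>` inside a catch block still catches every Throwable, but newer Scala compilers flag it and ask for an explicit type annotation; `case _ : Throwable =>` behaves identically and is warning-free. A minimal sketch of the two styles, using a hypothetical lock object rather than Spark code:

object WaitExample {
  private val lock = new Object

  // Old style (what the patch removes): the untyped wildcard catches every
  // Throwable, but recent scalac versions warn about it.
  //   try { lock.wait(100) } catch { case _ => }

  // Style used by the patch: identical behaviour, intent made explicit.
  def waitQuietly(): Unit = lock.synchronized {
    try { lock.wait(100) } catch { case _: Throwable => }
  }
}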
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d44bf3b5e3..5f378b2398 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -37,7 +37,7 @@ object SparkBuild extends Build {
organization := "org.spark-project",
version := "0.7.1-SNAPSHOT",
scalaVersion := "2.9.2",
- scalacOptions := Seq(/*"-deprecation",*/ "-unchecked", "-optimize"), // -deprecation is too noisy due to usage of old Hadoop API, enable it once that's no longer an issue
+ scalacOptions := Seq("-unchecked", "-optimize", "-deprecation"),
unmanagedJars in Compile <<= baseDirectory map { base => (base / "lib" ** "*.jar").classpath },
retrieveManaged := true,
retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
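Note: the build change above drops the commented-out flag and re-enables -deprecation alongside -unchecked and -optimize, so any use of a deprecated API now surfaces as a compiler warning. A hedged sketch of what triggers such a warning; DeprecationDemo, oldApi and newApi are made-up names, not part of Spark:

object DeprecationDemo {
  @deprecated("use newApi instead", "0.7.0")
  def oldApi(): Int = 41

  def newApi(): Int = 42

  // With "-deprecation" in scalacOptions, scalac reports something like
  // "method oldApi in object DeprecationDemo is deprecated" at this call site.
  def caller(): Int = oldApi()
}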
diff --git a/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala b/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala
index 6b310bc0b6..da224ad6f7 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala
+++ b/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala
@@ -28,7 +28,7 @@ class QueueInputDStream[T: ClassManifest](
}
if (buffer.size > 0) {
if (oneAtATime) {
- Some(buffer.first)
+ Some(buffer.head)
} else {
Some(new UnionRDD(ssc.sc, buffer.toSeq))
}
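Note: on Scala collections, first has been a deprecated alias of head since 2.8, so the change above silences the warning without altering behaviour; head returns the front element without dequeuing it. A small illustrative snippet (QueueHeadExample is hypothetical, not part of the patch):

import scala.collection.mutable.Queue

object QueueHeadExample {
  def main(args: Array[String]): Unit = {
    val buffer = new Queue[Int]
    buffer += 1
    buffer += 2

    // buffer.first            // deprecated alias, warns under -deprecation
    val front = buffer.head    // same element, no warning
    println(front)             // prints 1; the queue is left untouched
  }
}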