author    Matei Zaharia <matei@eecs.berkeley.edu>  2012-09-27 21:32:06 -0700
committer Matei Zaharia <matei@eecs.berkeley.edu>  2012-09-27 21:32:06 -0700
commit    4a138403efd876f177fe2ba43c1bd115b2858919 (patch)
tree      0670b3a405664422085b13a3905a467ea8ef895a /core/src/main
parent    009b0e37e7c284c531cc3c44d0e5b5b1476f1666 (diff)
Fix a bug in JAR fetcher that made it always fetch the JAR
Diffstat (limited to 'core/src/main')
-rw-r--r--  core/src/main/scala/spark/KryoSerializer.scala |  2 +-
-rw-r--r--  core/src/main/scala/spark/SizeEstimator.scala  | 16 ++++++----------
-rw-r--r--  core/src/main/scala/spark/scheduler/Task.scala |  4 ++--
3 files changed, 9 insertions(+), 13 deletions(-)
diff --git a/core/src/main/scala/spark/KryoSerializer.scala b/core/src/main/scala/spark/KryoSerializer.scala
index 244d50f49c..8aa27a747b 100644
--- a/core/src/main/scala/spark/KryoSerializer.scala
+++ b/core/src/main/scala/spark/KryoSerializer.scala
@@ -161,7 +161,7 @@ trait KryoRegistrator {
 }
 class KryoSerializer extends Serializer with Logging {
-  val kryo = createKryo()
+  lazy val kryo = createKryo()
   val bufferSize = System.getProperty("spark.kryoserializer.buffer.mb", "32").toInt * 1024 * 1024
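
For context on the KryoSerializer hunk: marking kryo as a lazy val defers the createKryo() call (and its class-registration work) from construction time to the first access, so merely instantiating the serializer no longer builds a Kryo instance. A minimal sketch of strict vs. lazy initialization, with hypothetical names (not from this commit):

    // Sketch (hypothetical names): strict vs. lazy field initialization.
    class Expensive {
      println("Expensive constructed")   // stand-in for createKryo()'s registration work
    }

    class Strict { val field = new Expensive }        // built when Strict is instantiated
    class Deferred { lazy val field = new Expensive } // built on first access to `field`

    object LazyDemo extends App {
      val d = new Deferred   // prints nothing yet
      d.field                // "Expensive constructed" printed here, on first use
    }
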
diff --git a/core/src/main/scala/spark/SizeEstimator.scala b/core/src/main/scala/spark/SizeEstimator.scala
index e5ad8b52dc..aadd475868 100644
--- a/core/src/main/scala/spark/SizeEstimator.scala
+++ b/core/src/main/scala/spark/SizeEstimator.scala
@@ -83,16 +83,12 @@ object SizeEstimator extends Logging {
         hotSpotMBeanName, classOf[HotSpotDiagnosticMXBean]);
       return bean.getVMOption("UseCompressedOops").getValue.toBoolean
     } catch {
-      case e: IllegalArgumentException => {
-        logWarning("Exception while trying to check if compressed oops is enabled", e)
-        // Fall back to checking if maxMemory < 32GB
-        return Runtime.getRuntime.maxMemory < (32L*1024*1024*1024)
-      }
-
-      case e: SecurityException => {
-        logWarning("No permission to create MBeanServer", e)
-        // Fall back to checking if maxMemory < 32GB
-        return Runtime.getRuntime.maxMemory < (32L*1024*1024*1024)
+      case e: Exception => {
+        // Guess whether they've enabled UseCompressedOops based on whether maxMemory < 32 GB
+        val guess = Runtime.getRuntime.maxMemory < (32L*1024*1024*1024)
+        val guessInWords = if (guess) "yes" else "not"
+        logWarning("Failed to check whether UseCompressedOops is set; assuming " + guessInWords)
+        return guess
       }
     }
   }
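
The collapsed catch clause falls back to a heap-size heuristic: HotSpot can only use compressed oops when the maximum heap fits in 32 GB, so maxMemory < 32 GB is a reasonable guess whenever the HotSpotDiagnostic MBean cannot be queried, whatever the reason. A standalone sketch of that guess (method name hypothetical):

    // Sketch: guess UseCompressedOops from the heap size alone. HotSpot disables
    // compressed oops once the maximum heap reaches 32 GB.
    def guessCompressedOops(): Boolean =
      Runtime.getRuntime.maxMemory < (32L * 1024 * 1024 * 1024)
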
diff --git a/core/src/main/scala/spark/scheduler/Task.scala b/core/src/main/scala/spark/scheduler/Task.scala
index 0d5b71b06c..6128e0b273 100644
--- a/core/src/main/scala/spark/scheduler/Task.scala
+++ b/core/src/main/scala/spark/scheduler/Task.scala
@@ -24,14 +24,14 @@ abstract class Task[T](val stageId: Int) extends Serializable {
     // Fetch missing file dependencies
     fileSet.filter { case(k,v) =>
-      !currentFileSet.contains(k) || currentFileSet(k) <= v
+      !currentFileSet.contains(k) || currentFileSet(k) < v
     }.foreach { case (k,v) =>
       Utils.fetchFile(k, new File(System.getProperty("user.dir")))
       currentFileSet(k) = v
     }
     // Fetch missing jar dependencies
     jarSet.filter { case(k,v) =>
-      !currentJarSet.contains(k) || currentJarSet(k) <= v
+      !currentJarSet.contains(k) || currentJarSet(k) < v
     }.foreach { case (k,v) =>
       Utils.fetchFile(k, new File(System.getProperty("user.dir")))
       currentJarSet(k) = v
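
This one-character change is the fix named in the commit title: with <=, an entry whose recorded timestamp equals the incoming one still passed the filter, so a JAR was re-fetched on every task even though the foreach had already recorded currentJarSet(k) = v, making the timestamps equal from then on. A minimal sketch of the corrected predicate, with hypothetical names:

    import scala.collection.mutable.HashMap

    object FetchCheckDemo extends App {
      // Sketch (hypothetical names): fetch only when the dependency is unseen
      // or strictly newer than what was already fetched.
      val currentJarSet = new HashMap[String, Long]

      def needsFetch(path: String, timestamp: Long): Boolean =
        !currentJarSet.contains(path) || currentJarSet(path) < timestamp

      assert(needsFetch("app.jar", 100L))    // never seen: fetch
      currentJarSet("app.jar") = 100L        // record the fetch, as the foreach does
      assert(!needsFetch("app.jar", 100L))   // same timestamp: skip (<= re-fetched here)
      assert(needsFetch("app.jar", 200L))    // strictly newer: fetch again
    }
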