From 4a138403efd876f177fe2ba43c1bd115b2858919 Mon Sep 17 00:00:00 2001
From: Matei Zaharia
Date: Thu, 27 Sep 2012 21:32:06 -0700
Subject: Fix a bug in JAR fetcher that made it always fetch the JAR

---
 core/src/main/scala/spark/KryoSerializer.scala |  2 +-
 core/src/main/scala/spark/SizeEstimator.scala  | 16 ++++++----------
 core/src/main/scala/spark/scheduler/Task.scala |  4 ++--
 3 files changed, 9 insertions(+), 13 deletions(-)

(limited to 'core/src/main')

diff --git a/core/src/main/scala/spark/KryoSerializer.scala b/core/src/main/scala/spark/KryoSerializer.scala
index 244d50f49c..8aa27a747b 100644
--- a/core/src/main/scala/spark/KryoSerializer.scala
+++ b/core/src/main/scala/spark/KryoSerializer.scala
@@ -161,7 +161,7 @@ trait KryoRegistrator {
 }
 
 class KryoSerializer extends Serializer with Logging {
-  val kryo = createKryo()
+  lazy val kryo = createKryo()
 
   val bufferSize = System.getProperty("spark.kryoserializer.buffer.mb", "32").toInt * 1024 * 1024
 
diff --git a/core/src/main/scala/spark/SizeEstimator.scala b/core/src/main/scala/spark/SizeEstimator.scala
index e5ad8b52dc..aadd475868 100644
--- a/core/src/main/scala/spark/SizeEstimator.scala
+++ b/core/src/main/scala/spark/SizeEstimator.scala
@@ -83,16 +83,12 @@ object SizeEstimator extends Logging {
         hotSpotMBeanName, classOf[HotSpotDiagnosticMXBean]);
       return bean.getVMOption("UseCompressedOops").getValue.toBoolean
     } catch {
-      case e: IllegalArgumentException => {
-        logWarning("Exception while trying to check if compressed oops is enabled", e)
-        // Fall back to checking if maxMemory < 32GB
-        return Runtime.getRuntime.maxMemory < (32L*1024*1024*1024)
-      }
-
-      case e: SecurityException => {
-        logWarning("No permission to create MBeanServer", e)
-        // Fall back to checking if maxMemory < 32GB
-        return Runtime.getRuntime.maxMemory < (32L*1024*1024*1024)
+      case e: Exception => {
+        // Guess whether they've enabled UseCompressedOops based on whether maxMemory < 32 GB
+        val guess = Runtime.getRuntime.maxMemory < (32L*1024*1024*1024)
+        val guessInWords = if (guess) "yes" else "not"
+        logWarning("Failed to check whether UseCompressedOops is set; assuming " + guessInWords)
+        return guess
       }
     }
   }
diff --git a/core/src/main/scala/spark/scheduler/Task.scala b/core/src/main/scala/spark/scheduler/Task.scala
index 0d5b71b06c..6128e0b273 100644
--- a/core/src/main/scala/spark/scheduler/Task.scala
+++ b/core/src/main/scala/spark/scheduler/Task.scala
@@ -24,14 +24,14 @@ abstract class Task[T](val stageId: Int) extends Serializable {
 
     // Fetch missing file dependencies
     fileSet.filter { case(k,v) =>
-      !currentFileSet.contains(k) || currentFileSet(k) <= v
+      !currentFileSet.contains(k) || currentFileSet(k) < v
    }.foreach { case (k,v) =>
       Utils.fetchFile(k, new File(System.getProperty("user.dir")))
       currentFileSet(k) = v
     }
     // Fetch missing jar dependencies
     jarSet.filter { case(k,v) =>
-      !currentJarSet.contains(k) || currentJarSet(k) <= v
+      !currentJarSet.contains(k) || currentJarSet(k) < v
     }.foreach { case (k,v) =>
       Utils.fetchFile(k, new File(System.getProperty("user.dir")))
       currentJarSet(k) = v
-- 
cgit v1.2.3
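
For context on the fix named in the subject line: with <=, a dependency whose recorded timestamp equals the incoming one was treated as stale, so every task refetched it; the strict < only refetches when the incoming version is newer. Below is a minimal, standalone Scala sketch of that decision rule, written for this note rather than taken from the Spark sources; the object and method names (FetchDecisionSketch, shouldFetch) are illustrative only.

import scala.collection.mutable.HashMap

object FetchDecisionSketch {
  // Decide whether dependency `name`, published at timestamp `ts`, must be fetched,
  // given the timestamps of copies already downloaded. A strict '<' means an
  // unchanged dependency is fetched at most once; '<=' would refetch it every time.
  def shouldFetch(downloaded: HashMap[String, Long], name: String, ts: Long): Boolean =
    !downloaded.contains(name) || downloaded(name) < ts

  def main(args: Array[String]): Unit = {
    val downloaded = HashMap("app.jar" -> 100L)
    println(shouldFetch(downloaded, "app.jar", 100L))  // false: copy is already current
    println(shouldFetch(downloaded, "app.jar", 101L))  // true: a newer version exists
    println(shouldFetch(downloaded, "util.jar", 50L))  // true: never downloaded before
  }
}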