From 4fd86e0e10149ad1803831a308a056c7105cbe67 Mon Sep 17 00:00:00 2001
From: Mingfei
Date: Sat, 8 Jun 2013 15:45:47 +0800
Subject: delete test code for joblogger in SparkContext

---
 core/src/main/scala/spark/SparkContext.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index b67a2066c8..70a9d7698c 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -48,7 +48,6 @@ import spark.scheduler.local.LocalScheduler
 import spark.scheduler.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
 import spark.storage.{BlockManagerUI, StorageStatus, StorageUtils, RDDInfo}
 import spark.util.{MetadataCleaner, TimeStampedHashMap}
-import spark.scheduler.JobLogger
 
 /**
  * Main entry point for Spark functionality. A SparkContext represents the connection to a Spark
@@ -510,7 +509,7 @@ class SparkContext(
   def addSparkListener(listener: SparkListener) {
     dagScheduler.sparkListeners += listener
   }
-  addSparkListener(new JobLogger)
+
   /**
    * Return a map from the slave to the max memory available for caching and the remaining
    * memory available for caching.
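
The removed line had registered a JobLogger unconditionally inside the SparkContext
constructor, so every application carried a job-logging listener whether it wanted one
or not. With that test code gone, job logging becomes opt-in through the public
addSparkListener hook shown in the second hunk. Below is a minimal sketch of that
opt-in; only addSparkListener and the no-argument JobLogger appear in the patch
itself, so the two-argument SparkContext(master, jobName) constructor and the sample
job are assumptions about the era-appropriate API:

import spark.SparkContext
import spark.scheduler.JobLogger

object JobLoggerOptIn {
  def main(args: Array[String]) {
    // Assumed era-appropriate constructor: SparkContext(master, jobName).
    val sc = new SparkContext("local", "JobLoggerOptIn")

    // JobLogger is no longer attached automatically by SparkContext;
    // an application that still wants it registers it explicitly.
    sc.addSparkListener(new JobLogger)

    // A trivial job so the listener has something to record.
    val n = sc.parallelize(1 to 100).map(_ * 2).count()
    println("count = " + n)

    sc.stop()
  }
}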