author      jerryshao <saisai.shao@intel.com>    2013-07-02 13:48:09 +0800
committer   jerryshao <saisai.shao@intel.com>    2013-07-24 14:57:47 +0800
commit      5730193e0c8639b654f489c5956e31451d81b7db (patch)
tree        a8bf7ab87eec260bd6d44863b23a0e8e4bcf8f02
parent      a79f6077f0b3bc0110f6e8e15de3068f296f3b81 (diff)
download    spark-5730193e0c8639b654f489c5956e31451d81b7db.tar.gz
            spark-5730193e0c8639b654f489c5956e31451d81b7db.tar.bz2
            spark-5730193e0c8639b654f489c5956e31451d81b7db.zip
Fix some typos
-rw-r--r--  core/src/main/scala/spark/SparkContext.scala              |  2
-rw-r--r--  core/src/main/scala/spark/SparkEnv.scala                  |  1
-rw-r--r--  core/src/main/scala/spark/metrics/sink/JmxSink.scala      | 10
-rw-r--r--  core/src/main/scala/spark/metrics/source/JvmSource.scala  |  6
-rw-r--r--  core/src/main/scala/spark/metrics/source/Source.scala     |  3
-rw-r--r--  core/src/main/scala/spark/scheduler/DAGScheduler.scala    |  3
-rw-r--r--  project/SparkBuild.scala                                  |  2
7 files changed, 11 insertions, 16 deletions
diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index f1d9d5e442..1e59a4d47d 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -69,8 +69,6 @@ import spark.util.{MetadataCleaner, TimeStampedHashMap}
 import ui.{SparkUI}
 import spark.metrics._
 
-import scala.util.DynamicVariable
-
 /**
  * Main entry point for Spark functionality. A SparkContext represents the connection to a Spark
  * cluster, and can be used to create RDDs, accumulators and broadcast variables on that cluster.
diff --git a/core/src/main/scala/spark/SparkEnv.scala b/core/src/main/scala/spark/SparkEnv.scala
index 204049d927..4a1d341f5d 100644
--- a/core/src/main/scala/spark/SparkEnv.scala
+++ b/core/src/main/scala/spark/SparkEnv.scala
@@ -31,7 +31,6 @@ import spark.network.ConnectionManager
 import spark.serializer.{Serializer, SerializerManager}
 import spark.util.AkkaUtils
 import spark.api.python.PythonWorkerFactory
-import spark.metrics._
 
 
 /**
diff --git a/core/src/main/scala/spark/metrics/sink/JmxSink.scala b/core/src/main/scala/spark/metrics/sink/JmxSink.scala
index f097a631c0..6a40885b78 100644
--- a/core/src/main/scala/spark/metrics/sink/JmxSink.scala
+++ b/core/src/main/scala/spark/metrics/sink/JmxSink.scala
@@ -1,18 +1,18 @@
 package spark.metrics.sink
 
-import java.util.Properties
-
 import com.codahale.metrics.{JmxReporter, MetricRegistry}
+import java.util.Properties
+
 
 class JmxSink(val property: Properties, val registry: MetricRegistry) extends Sink {
   val reporter: JmxReporter = JmxReporter.forRegistry(registry).build()
-
+
   override def start() {
     reporter.start()
   }
-
+
   override def stop() {
     reporter.stop()
   }
-
+
 }
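
Aside from reordering imports and trimming trailing whitespace, this file is unchanged: JmxSink is a thin wrapper around codahale's JmxReporter, which publishes every metric in a registry as a JMX MBean. A minimal self-contained sketch of that underlying behaviour, assuming metrics-core 3.0.0 on the classpath (the object name and the "requests" counter are illustrative, not part of the commit):

    import com.codahale.metrics.{JmxReporter, MetricRegistry}

    object JmxReporterSketch {
      def main(args: Array[String]) {
        val registry = new MetricRegistry()
        registry.counter("requests").inc()    // metrics added later show up in JMX too

        // The same calls JmxSink makes: build a reporter over the registry,
        // then start/stop it around the application's lifetime.
        val reporter = JmxReporter.forRegistry(registry).build()
        reporter.start()

        Thread.sleep(60000)                   // keep the JVM alive long enough to inspect in jconsole
        reporter.stop()
      }
    }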
diff --git a/core/src/main/scala/spark/metrics/source/JvmSource.scala b/core/src/main/scala/spark/metrics/source/JvmSource.scala
index 8f0870c1a0..79f505079c 100644
--- a/core/src/main/scala/spark/metrics/source/JvmSource.scala
+++ b/core/src/main/scala/spark/metrics/source/JvmSource.scala
@@ -3,15 +3,13 @@ package spark.metrics.source
 import com.codahale.metrics.MetricRegistry
 import com.codahale.metrics.jvm.{GarbageCollectorMetricSet, MemoryUsageGaugeSet}
 
-import java.util.{Map, HashMap => JHashMap}
-
 class JvmSource extends Source {
   val sourceName = "jvm"
   val metricRegistry = new MetricRegistry()
-
+
   val gcMetricSet = new GarbageCollectorMetricSet
   val memGaugeSet = new MemoryUsageGaugeSet
-
+
   metricRegistry.registerAll(gcMetricSet)
   metricRegistry.registerAll(memGaugeSet)
 }
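
For context on what this source does: registerAll flattens a MetricSet into individually named metrics, so the two sets above leave the source's registry holding gauges such as "heap.used". A sketch that registers the same two sets and dumps the resulting gauges, assuming metrics-jvm 3.0.0 (the object name is illustrative):

    import scala.collection.JavaConverters._
    import com.codahale.metrics.MetricRegistry
    import com.codahale.metrics.jvm.{GarbageCollectorMetricSet, MemoryUsageGaugeSet}

    object JvmGaugesSketch {
      def main(args: Array[String]) {
        val registry = new MetricRegistry()
        registry.registerAll(new GarbageCollectorMetricSet)
        registry.registerAll(new MemoryUsageGaugeSet)

        // Each MetricSet entry becomes a named gauge in the registry.
        for ((name, gauge) <- registry.getGauges.asScala)
          println(name + " = " + gauge.getValue)
      }
    }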
diff --git a/core/src/main/scala/spark/metrics/source/Source.scala b/core/src/main/scala/spark/metrics/source/Source.scala
index 17cbe2f85a..5607e2c40a 100644
--- a/core/src/main/scala/spark/metrics/source/Source.scala
+++ b/core/src/main/scala/spark/metrics/source/Source.scala
@@ -1,9 +1,8 @@
 package spark.metrics.source
 
-import com.codahale.metrics.MetricSet
 import com.codahale.metrics.MetricRegistry
 
 trait Source {
   def sourceName: String
-  def metricRegistry: MetricRegistry
+  def metricRegistry: MetricRegistry
 }
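
The trait now imports only what it uses. Any metrics source just supplies these two members; a hypothetical implementation for illustration (UptimeSource and its gauge name are not part of Spark):

    import com.codahale.metrics.{Gauge, MetricRegistry}

    class UptimeSource extends Source {
      val sourceName = "uptime"
      val metricRegistry = new MetricRegistry()

      private val startedAt = System.currentTimeMillis

      // Register one gauge; its value is computed each time a sink polls it.
      metricRegistry.register(MetricRegistry.name("uptime", "millis"),
        new Gauge[Long] {
          override def getValue: Long = System.currentTimeMillis - startedAt
        })
    }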
diff --git a/core/src/main/scala/spark/scheduler/DAGScheduler.scala b/core/src/main/scala/spark/scheduler/DAGScheduler.scala
index 781e49bdec..9b45fc2938 100644
--- a/core/src/main/scala/spark/scheduler/DAGScheduler.scala
+++ b/core/src/main/scala/spark/scheduler/DAGScheduler.scala
@@ -30,7 +30,7 @@ import spark.partial.{ApproximateActionListener, ApproximateEvaluator, PartialResult}
 import spark.scheduler.cluster.TaskInfo
 import spark.storage.{BlockManager, BlockManagerMaster}
 import spark.util.{MetadataCleaner, TimeStampedHashMap}
-import spark.metrics.MetricsSystem
+
 /**
  * A Scheduler subclass that implements stage-oriented scheduling. It computes a DAG of stages for
  * each job, keeps track of which RDDs and stage outputs are materialized, and computes a minimal
@@ -126,6 +126,7 @@ class DAGScheduler(
   val resultStageToJob = new HashMap[Stage, ActiveJob]
 
   val metadataCleaner = new MetadataCleaner("DAGScheduler", this.cleanup)
+
   // Start a thread to run the DAGScheduler event loop
   def start() {
     new Thread("DAGScheduler") {
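
The context around the added blank line shows the pattern DAGScheduler relies on: a named daemon thread drains a blocking event queue, so callers can post events without blocking. A stripped-down sketch of that pattern (the class name and String event type are illustrative, not DAGScheduler's actual types):

    import java.util.concurrent.LinkedBlockingQueue

    class EventLoopSketch {
      private val eventQueue = new LinkedBlockingQueue[String]()

      def post(event: String) { eventQueue.put(event) }

      def start() {
        new Thread("EventLoopSketch") {
          setDaemon(true)                      // don't keep the JVM alive for this thread
          override def run() {
            while (true) {
              val event = eventQueue.take()    // blocks until an event arrives
              println("processing " + event)
            }
          }
        }.start()
      }
    }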
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 825319d3de..d4d70afdd5 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -179,7 +179,7 @@ object SparkBuild extends Build {
       "net.liftweb" % "lift-json_2.9.2" % "2.5",
       "org.apache.mesos" % "mesos" % "0.9.0-incubating",
       "io.netty" % "netty-all" % "4.0.0.Beta2",
-      "org.apache.derby" % "derby" % "10.4.2.0" % "test"
+      "org.apache.derby" % "derby" % "10.4.2.0" % "test",
       "com.codahale.metrics" % "metrics-core" % "3.0.0",
       "com.codahale.metrics" % "metrics-jvm" % "3.0.0"
     ) ++ (
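
This last hunk is the substantive fix: without the trailing comma after the derby entry, the following string literal sits adjacent to it inside Seq(...) and the build definition would fail to compile. For readability, the corrected fragment on its own (versions copied from the hunk above; standard sbt-era Scala build syntax):

    // project/SparkBuild.scala (fragment) -- the corrected dependency list
    libraryDependencies ++= Seq(
      "org.apache.derby"     % "derby"        % "10.4.2.0" % "test",  // comma restored here
      "com.codahale.metrics" % "metrics-core" % "3.0.0",
      "com.codahale.metrics" % "metrics-jvm"  % "3.0.0"
    )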