From 7a295fee9641e7b9480dd5cb520afe45039ffbe0 Mon Sep 17 00:00:00 2001
From: Denny
Date: Tue, 31 Jul 2012 08:39:24 -0700
Subject: Spark WebUI Implementation.

---
 project/SparkBuild.scala | 3 ++-
 project/plugins.sbt      | 8 ++++++--
 2 files changed, 8 insertions(+), 3 deletions(-)

(limited to 'project')

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 726d490738..d1445f2ade 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -2,6 +2,7 @@ import sbt._
 import Keys._
 import sbtassembly.Plugin._
 import AssemblyKeys._
+import twirl.sbt.TwirlPlugin._
 
 object SparkBuild extends Build {
   // Hadoop version to build against. For example, "0.20.2", "0.20.205.0", or
@@ -69,7 +70,7 @@ object SparkBuild extends Build {
       "cc.spray" % "spray-can" % "1.0-M2.1",
       "cc.spray" % "spray-server" % "1.0-M2.1"
     )
-  ) ++ assemblySettings ++ extraAssemblySettings
+  ) ++ assemblySettings ++ extraAssemblySettings ++ Seq(Twirl.settings: _*)
 
   def replSettings = sharedSettings ++ Seq(
     name := "spark-repl",
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 0e2b6d4902..896fa4834f 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1,9 +1,13 @@
-resolvers += Classpaths.typesafeResolver
-
 resolvers += Resolver.url("artifactory", url("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
 
+resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/"
+
+resolvers += "Spray Repository" at "http://repo.spray.cc/"
+
 addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.8.3")
 
 addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.1.0-RC1")
 
 addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.0.0")
+
+addSbtPlugin("cc.spray" %% "sbt-twirl" % "0.5.2")
-- 
cgit v1.2.3

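Background on the plugin wired in above: sbt-twirl is the Spray project's standalone packaging of the Play framework's template engine, and Twirl.settings makes sbt compile templates (by default those under src/main/twirl) into ordinary Scala objects. The sketch below is illustrative only: the masterui package mirrors the masterui.html.job_details.render(job) call visible in the third commit of this series, but the index template and its parameters are hypothetical. A template file such as src/main/twirl/masterui/index.scala.html:

    @(title: String, workerCount: Int)
    <html>
      <head><title>@title</title></head>
      <body><p>@title is managing @workerCount workers.</p></body>
    </html>

would then be rendered from plain Scala:

    // Hypothetical call site: `masterui.html.index` is the object the Twirl
    // compiler generates from the template above; `render` takes the
    // template's declared parameters and returns the rendered markup.
    val page = masterui.html.index.render("Spark Master", 4)
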
From 039b41cb54e7b24563e7a456b7ed7a89c9c0c2bd Mon Sep 17 00:00:00 2001
From: Josh Rosen
Date: Thu, 2 Aug 2012 10:12:00 -0700
Subject: Use sbt mergeStrategy for reference.conf files.

Cleans up #158 / 509b721.
---
 project/SparkBuild.scala | 58 +++---------------------------------------------
 1 file changed, 3 insertions(+), 55 deletions(-)

(limited to 'project')

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 3603d42a23..cb21bfba39 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -1,7 +1,5 @@
 import sbt._
 import Keys._
-import classpath.ClasspathUtilities.isArchive
-import java.io.FileOutputStream
 import sbtassembly.Plugin._
 import AssemblyKeys._
 
@@ -71,12 +69,12 @@ object SparkBuild extends Build {
       "cc.spray" % "spray-can" % "1.0-M2.1",
       "cc.spray" % "spray-server" % "1.0-M2.1"
     )
-  ) ++ assemblySettings ++ extraAssemblySettings ++ mergeSettings
+  ) ++ assemblySettings ++ extraAssemblySettings
 
   def replSettings = sharedSettings ++ Seq(
     name := "spark-repl",
     libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)
-  ) ++ assemblySettings ++ extraAssemblySettings ++ mergeSettings
+  ) ++ assemblySettings ++ extraAssemblySettings
 
   def examplesSettings = sharedSettings ++ Seq(
     name := "spark-examples"
@@ -84,60 +82,10 @@ object SparkBuild extends Build {
 
   def bagelSettings = sharedSettings ++ Seq(name := "spark-bagel")
 
-  // Fix for "No configuration setting found for key 'akka.version'" exception
-  // when running Spark from the jar generated by the "assembly" task; see
-  // http://letitcrash.com/post/21025950392/howto-sbt-assembly-vs-reference-conf
-  lazy val merge = TaskKey[File]("merge-reference",
-    "merge all reference.conf")
-
-  lazy val mergeSettings: Seq[Project.Setting[_]] = Seq(
-    merge <<= (fullClasspath in assembly) map {
-      c =>
-        // collect from all elements of the full classpath
-        val (libs, dirs) =
-          c map (_.data) partition (isArchive)
-        // goal is to simply concatenate files here
-        val dest = file("reference.conf")
-        val out = new FileOutputStream(dest)
-        val append = IO.transfer(_: File, out)
-        try {
-          // first collect from managed sources
-          (dirs * "reference.conf").get foreach append
-          // then from dependency jars by unzipping and
-          // collecting reference.conf if present
-          for (lib <- libs) {
-            IO withTemporaryDirectory {
-              dir =>
-                IO.unzip(lib, dir, "reference.conf")
-                (dir * "reference.conf").get foreach append
-            }
-          }
-          // return merged file location as task result
-          dest
-        } finally {
-          out.close()
-        }
-    },
-
-    // get rid of the individual files from jars
-    excludedFiles in assembly <<=
-      (excludedFiles in assembly) {
-        (old) => (bases) =>
-          old(bases) ++ (bases flatMap (base =>
-            (base / "reference.conf").get))
-      },
-
-    // tell sbt-assembly to include our merged file
-    assembledMappings in assembly <<=
-      (assembledMappings in assembly, merge) map {
-        (old, merged) => (f) =>
-          old(f) :+(merged, "reference.conf")
-      }
-  )
-
   def extraAssemblySettings() = Seq(test in assembly := {}) ++ Seq(
     mergeStrategy in assembly := {
       case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
+      case "reference.conf" => MergeStrategy.concat
      case _ => MergeStrategy.first
    }
  )
-- 
cgit v1.2.3

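Why one MergeStrategy.concat line can replace all of the removed machinery: several Spark dependencies (Akka in particular) each ship a reference.conf of library defaults inside their jar, and sbt-assembly's default of keeping only the first file at a given path silently discards the rest. Concatenating every copy into the assembly jar's single reference.conf preserves all the defaults. The sketch below is not part of the patch; it only illustrates the failure mode named in the removed comment, assuming the Typesafe Config library that Akka uses for its configuration:

    import com.typesafe.config.ConfigFactory

    object ConfigCheck {
      def main(args: Array[String]) {
        // ConfigFactory.load() reads reference.conf from the classpath. Inside
        // an assembly jar only one such resource exists, so if Akka's copy was
        // discarded rather than concatenated, this lookup fails with
        // "No configuration setting found for key 'akka.version'".
        val conf = ConfigFactory.load()
        println(conf.getString("akka.version"))
      }
    }
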
From ba7e30fb5ee31ab3f99dcb95148f0c95bf3f45b4 Mon Sep 17 00:00:00 2001
From: Denny
Date: Thu, 2 Aug 2012 13:55:09 -0700
Subject: Mostly stylistic changes.

---
 core/src/main/scala/spark/deploy/DeployMessage.scala      | 7 +++++--
 core/src/main/scala/spark/deploy/master/MasterWebUI.scala | 5 +++--
 core/src/main/scala/spark/deploy/worker/Worker.scala      | 4 +++-
 project/SparkBuild.scala                                  | 2 +-
 4 files changed, 12 insertions(+), 6 deletions(-)

(limited to 'project')

diff --git a/core/src/main/scala/spark/deploy/DeployMessage.scala b/core/src/main/scala/spark/deploy/DeployMessage.scala
index b764475c12..141bbe4d57 100644
--- a/core/src/main/scala/spark/deploy/DeployMessage.scala
+++ b/core/src/main/scala/spark/deploy/DeployMessage.scala
@@ -57,11 +57,14 @@ case object RequestMasterState
 
 // Master to MasterWebUI
 
-case class MasterState(uri : String, workers: List[WorkerInfo], activeJobs: List[JobInfo], completedJobs: List[JobInfo])
+case class MasterState(uri : String, workers: List[WorkerInfo], activeJobs: List[JobInfo],
+  completedJobs: List[JobInfo])
 
 // WorkerWebUI to Worker
 
 case object RequestWorkerState
 
 // Worker to WorkerWebUI
-case class WorkerState(uri: String, workerId: String, executors: List[ExecutorRunner], finishedExecutors: List[ExecutorRunner], masterUrl: String, cores: Int, memory: Int, coresUsed: Int, memoryUsed: Int, masterWebUiUrl: String)
\ No newline at end of file
+case class WorkerState(uri: String, workerId: String, executors: List[ExecutorRunner],
+  finishedExecutors: List[ExecutorRunner], masterUrl: String, cores: Int, memory: Int,
+  coresUsed: Int, memoryUsed: Int, masterWebUiUrl: String)
\ No newline at end of file
diff --git a/core/src/main/scala/spark/deploy/master/MasterWebUI.scala b/core/src/main/scala/spark/deploy/master/MasterWebUI.scala
index 0dce4c4abd..cb94174bcc 100644
--- a/core/src/main/scala/spark/deploy/master/MasterWebUI.scala
+++ b/core/src/main/scala/spark/deploy/master/MasterWebUI.scala
@@ -26,8 +26,9 @@ class MasterWebUI(val actorSystem: ActorSystem, master: ActorRef) extends Direct
       path("job") {
         parameter("jobId") { jobId =>
           completeWith {
-            val masterState = getMasterState
-            // A bit ugly an inefficient, but we won't have a number of jobs so large that it will make a significant difference.
+            val masterState = getMasterState()
+            // A bit ugly and inefficient, but we won't have a number of jobs
+            // so large that it will make a significant difference.
             (masterState.activeJobs ::: masterState.completedJobs).find(_.id == jobId) match {
               case Some(job) => masterui.html.job_details.render(job)
               case _ => null
diff --git a/core/src/main/scala/spark/deploy/worker/Worker.scala b/core/src/main/scala/spark/deploy/worker/Worker.scala
index ffe6bb8cb4..8647f118ee 100644
--- a/core/src/main/scala/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/spark/deploy/worker/Worker.scala
@@ -136,7 +136,9 @@ class Worker(ip: String, port: Int, webUiPort: Int, cores: Int, memory: Int, mas
       masterDisconnected()
 
     case RequestWorkerState => {
-      sender ! WorkerState(ip + ":" + port, workerId, executors.values.toList, finishedExecutors.values.toList, masterUrl, cores, memory, coresUsed, memoryUsed, masterWebUiUrl)
+      sender ! WorkerState(ip + ":" + port, workerId, executors.values.toList,
+        finishedExecutors.values.toList, masterUrl, cores, memory,
+        coresUsed, memoryUsed, masterWebUiUrl)
     }
   }
 
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 684108677f..a244f9c229 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -72,7 +72,7 @@ object SparkBuild extends Build {
       "cc.spray" % "spray-can" % "1.0-M2.1",
       "cc.spray" % "spray-server" % "1.0-M2.1"
     )
-  ) ++ assemblySettings ++ extraAssemblySettings ++ mergeSettings ++ Seq(Twirl.settings: _*)
+  ) ++ assemblySettings ++ extraAssemblySettings ++ mergeSettings ++ Twirl.settings
 
   def replSettings = sharedSettings ++ Seq(
     name := "spark-repl",
-- 
cgit v1.2.3
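For context on the RequestWorkerState/WorkerState pair reformatted above: the worker web UI asks the worker actor for its state and receives the case class as the reply. A minimal sketch of that round trip, assuming the Akka 2.0-era APIs Spark used at the time (the timeout value and the helper are illustrative, not from the patch):

    import akka.actor.ActorRef
    import akka.dispatch.Await
    import akka.pattern.ask
    import akka.util.Timeout
    import akka.util.duration._

    def fetchWorkerState(worker: ActorRef): WorkerState = {
      implicit val timeout = Timeout(10 seconds)
      // `?` sends RequestWorkerState and returns a Future[Any]; the worker's
      // receive block replies with a WorkerState, which mapTo narrows.
      val future = (worker ? RequestWorkerState).mapTo[WorkerState]
      Await.result(future, timeout.duration)
    }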