aboutsummaryrefslogtreecommitdiff
path: root/project/SparkBuild.scala
diff options
context:
space:
mode:
authorfolone <folone@gmail.com>2013-01-14 09:52:11 +0100
committerfolone <folone@gmail.com>2013-01-14 09:52:11 +0100
commit25c0739bad7222d45b4818c7bf6987521a3509d2 (patch)
treeef3f8c92b805f99db3b178853be376968fc8b6f4 /project/SparkBuild.scala
parentcb867e9ffb2c5e3d65d50c222fcce3631b94e4dd (diff)
downloadspark-25c0739bad7222d45b4818c7bf6987521a3509d2.tar.gz
spark-25c0739bad7222d45b4818c7bf6987521a3509d2.tar.bz2
spark-25c0739bad7222d45b4818c7bf6987521a3509d2.zip
Moved to scala 2.10.0. Notable changes are:
- akka 2.0.3 → 2.1.0
- spray 1.0-M1 → 1.1-M7

For now the repl subproject is commented out, as the Scala reflection API changed very much since the introduction of macros.
Diffstat (limited to 'project/SparkBuild.scala')
-rw-r--r--project/SparkBuild.scala70
1 file changed, 36 insertions, 34 deletions
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 219674028e..d0b3c350f1 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -17,11 +17,11 @@ object SparkBuild extends Build {
//val HADOOP_VERSION = "2.0.0-mr1-cdh4.1.1"
//val HADOOP_MAJOR_VERSION = "2"
- lazy val root = Project("root", file("."), settings = rootSettings) aggregate(core, repl, examples, bagel)
+ lazy val root = Project("root", file("."), settings = rootSettings) aggregate(core, /*repl,*/ examples, bagel)
lazy val core = Project("core", file("core"), settings = coreSettings)
- lazy val repl = Project("repl", file("repl"), settings = replSettings) dependsOn (core)
+// lazy val repl = Project("repl", file("repl"), settings = replSettings) dependsOn (core)
lazy val examples = Project("examples", file("examples"), settings = examplesSettings) dependsOn (core)
@@ -32,10 +32,10 @@ object SparkBuild extends Build {
lazy val publishLocalBoth = TaskKey[Unit]("publish-local", "publish local for m2 and ivy")
def sharedSettings = Defaults.defaultSettings ++ Seq(
- organization := "org.spark-project",
- version := "0.7.0-SNAPSHOT",
- scalaVersion := "2.9.2",
- scalacOptions := Seq(/*"-deprecation",*/ "-unchecked", "-optimize"), // -deprecation is too noisy due to usage of old Hadoop API, enable it once that's no longer an issue
+ organization := "org.spark-project",
+ version := "0.7.0-SNAPSHOT",
+ scalaVersion := "2.10.0",
+ scalacOptions := Seq(/*"-deprecation",*/ "-unchecked", "-optimize"), // -deprecation is too noisy due to usage of old Hadoop API, enable it once that's no longer an issue
unmanagedJars in Compile <<= baseDirectory map { base => (base / "lib" ** "*.jar").classpath },
retrieveManaged := true,
retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
@@ -87,11 +87,11 @@ object SparkBuild extends Build {
*/
libraryDependencies ++= Seq(
- "org.eclipse.jetty" % "jetty-server" % "7.5.3.v20111011",
- "org.scalatest" %% "scalatest" % "1.8" % "test",
- "org.scalacheck" %% "scalacheck" % "1.9" % "test",
- "com.novocode" % "junit-interface" % "0.8" % "test"
- ),
+ "org.eclipse.jetty" % "jetty-server" % "7.5.3.v20111011",
+ "org.scalatest" %% "scalatest" % "1.9.1" % "test",
+ "org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
+ "com.novocode" % "junit-interface" % "0.8" % "test"
+ ),
parallelExecution := false,
/* Workaround for issue #206 (fixed after SBT 0.11.0) */
watchTransitiveSources <<= Defaults.inDependencies[Task[Seq[File]]](watchSources.task,
@@ -112,31 +112,33 @@ object SparkBuild extends Build {
name := "spark-core",
resolvers ++= Seq(
"Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/",
- "JBoss Repository" at "http://repository.jboss.org/nexus/content/repositories/releases/",
- "Spray Repository" at "http://repo.spray.cc/",
+ "JBoss Repository" at "http://repository.jboss.org/nexus/content/repositories/releases/",
+ "Spray Repository" at "http://repo.spray.cc/",
"Cloudera Repository" at "https://repository.cloudera.com/artifactory/cloudera-repos/"
),
libraryDependencies ++= Seq(
- "com.google.guava" % "guava" % "11.0.1",
- "log4j" % "log4j" % "1.2.16",
- "org.slf4j" % "slf4j-api" % slf4jVersion,
- "org.slf4j" % "slf4j-log4j12" % slf4jVersion,
- "com.ning" % "compress-lzf" % "0.8.4",
- "org.apache.hadoop" % "hadoop-core" % HADOOP_VERSION,
- "asm" % "asm-all" % "3.3.1",
- "com.google.protobuf" % "protobuf-java" % "2.4.1",
- "de.javakaffee" % "kryo-serializers" % "0.20",
- "com.typesafe.akka" % "akka-actor" % "2.0.3",
- "com.typesafe.akka" % "akka-remote" % "2.0.3",
- "com.typesafe.akka" % "akka-slf4j" % "2.0.3",
- "it.unimi.dsi" % "fastutil" % "6.4.4",
- "colt" % "colt" % "1.2.0",
- "cc.spray" % "spray-can" % "1.0-M2.1",
- "cc.spray" % "spray-server" % "1.0-M2.1",
- "cc.spray" %% "spray-json" % "1.1.1",
- "org.apache.mesos" % "mesos" % "0.9.0-incubating"
- ) ++ (if (HADOOP_MAJOR_VERSION == "2") Some("org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION) else None).toSeq,
+ "com.google.guava" % "guava" % "11.0.1",
+ "log4j" % "log4j" % "1.2.16",
+ "org.slf4j" % "slf4j-api" % slf4jVersion,
+ "org.slf4j" % "slf4j-log4j12" % slf4jVersion,
+ "com.ning" % "compress-lzf" % "0.8.4",
+ "org.apache.hadoop" % "hadoop-core" % HADOOP_VERSION,
+ "asm" % "asm-all" % "3.3.1",
+ "com.google.protobuf" % "protobuf-java" % "2.4.1",
+ "de.javakaffee" % "kryo-serializers" % "0.20",
+ "com.typesafe.akka" %% "akka-remote" % "2.1.0",
+ "com.typesafe.akka" %% "akka-slf4j" % "2.1.0",
+ "it.unimi.dsi" % "fastutil" % "6.4.4",
+ "io.spray" % "spray-can" % "1.1-M7",
+ "io.spray" % "spray-io" % "1.1-M7",
+ "io.spray" % "spray-routing" % "1.1-M7",
+ "io.spray" %% "spray-json" % "1.2.3",
+ "colt" % "colt" % "1.2.0",
+ "org.apache.mesos" % "mesos" % "0.9.0-incubating",
+ "org.scala-lang" % "scala-actors" % "2.10.0"
+ ) ++ (if (HADOOP_MAJOR_VERSION == "2")
+ Some("org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION) else None).toSeq,
unmanagedSourceDirectories in Compile <+= baseDirectory{ _ / ("src/hadoop" + HADOOP_MAJOR_VERSION + "/scala") }
) ++ assemblySettings ++ extraAssemblySettings ++ Twirl.settings
@@ -144,10 +146,10 @@ object SparkBuild extends Build {
publish := {}
)
- def replSettings = sharedSettings ++ Seq(
+/* def replSettings = sharedSettings ++ Seq(
name := "spark-repl",
libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)
- )
+ )*/
def examplesSettings = sharedSettings ++ Seq(
name := "spark-examples"