about summary refs log tree commit diff
path: root/project
diff options
context:
space:
mode:
author	Prashant Sharma <prashant.s@imaginea.com>	2014-01-03 14:37:42 +0530
committer	Prashant Sharma <prashant.s@imaginea.com>	2014-01-03 14:43:37 +0530
commit94f2fffa23436ed66a24c705f88dabe59bf54037 (patch)
treea83d3f9e512183903e1b69fccdd0d2bcef1663ab /project
parentb4bb80002bbf0ac3642c78ae9e5c260b5da4a4cc (diff)
downloadspark-94f2fffa23436ed66a24c705f88dabe59bf54037.tar.gz
spark-94f2fffa23436ed66a24c705f88dabe59bf54037.tar.bz2
spark-94f2fffa23436ed66a24c705f88dabe59bf54037.zip
fixed review comments
Diffstat (limited to 'project')
-rw-r--r--	project/SparkBuild.scala	14
1 file changed, 9 insertions, 5 deletions
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index b335b5a20a..8290e7cf43 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -20,6 +20,7 @@ import sbt.Classpaths.publishTask
import Keys._
import sbtassembly.Plugin._
import AssemblyKeys._
+import scala.util.Properties
// For Sonatype publishing
//import com.jsuereth.pgp.sbtplugin.PgpKeys._
@@ -68,10 +69,12 @@ object SparkBuild extends Build {
// A configuration to set an alternative publishLocalConfiguration
lazy val MavenCompile = config("m2r") extend(Compile)
lazy val publishLocalBoth = TaskKey[Unit]("publish-local", "publish local for m2 and ivy")
-
+ val sparkHome = System.getProperty("user.dir")
+ System.setProperty("spark.home", sparkHome)
+ System.setProperty("spark.testing", "1")
// Allows build configuration to be set through environment variables
- lazy val hadoopVersion = scala.util.Properties.envOrElse("SPARK_HADOOP_VERSION", DEFAULT_HADOOP_VERSION)
- lazy val isNewHadoop = scala.util.Properties.envOrNone("SPARK_IS_NEW_HADOOP") match {
+ lazy val hadoopVersion = Properties.envOrElse("SPARK_HADOOP_VERSION", DEFAULT_HADOOP_VERSION)
+ lazy val isNewHadoop = Properties.envOrNone("SPARK_IS_NEW_HADOOP") match {
case None => {
val isNewHadoopVersion = "2.[2-9]+".r.findFirstIn(hadoopVersion).isDefined
(isNewHadoopVersion|| DEFAULT_IS_NEW_HADOOP)
@@ -79,7 +82,7 @@ object SparkBuild extends Build {
case Some(v) => v.toBoolean
}
- lazy val isYarnEnabled = scala.util.Properties.envOrNone("SPARK_YARN") match {
+ lazy val isYarnEnabled = Properties.envOrNone("SPARK_YARN") match {
case None => DEFAULT_YARN
case Some(v) => v.toBoolean
}
@@ -112,8 +115,9 @@ object SparkBuild extends Build {
// Fork new JVMs for tests and set Java options for those
fork := true,
+ javaOptions += "-Dspark.home=" + sparkHome,
+ javaOptions += "-Dspark.testing=1",
javaOptions += "-Xmx3g",
-
// Show full stack trace and duration in test cases.
testOptions in Test += Tests.Argument("-oDF"),