author     Aaron Davidson <aaron@databricks.com>  2014-03-19 17:56:48 -0700
committer  Aaron Davidson <aaron@databricks.com>  2014-03-19 17:56:48 -0700
commit     ffe272d97c22955fe7744b1c0132cd9877b6df96 (patch)
tree       e1905d8e7cdd14bfa2575d4a3855985c8cc219ce /core
parent     16789317a34c1974f7b35960f06a7b51d8e0f29f (diff)
Revert "SPARK-1099:Spark's local mode should probably respect spark.cores.max by default"
This reverts commit 16789317a34c1974f7b35960f06a7b51d8e0f29f. Jenkins was not run for this PR.
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala                        |  5
-rw-r--r--  core/src/test/scala/org/apache/spark/FileSuite.scala                           |  4
-rw-r--r--  core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala  | 19
3 files changed, 6 insertions, 22 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 8f74607278..a1003b7925 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1262,10 +1262,7 @@ object SparkContext extends Logging {
     master match {
       case "local" =>
         val scheduler = new TaskSchedulerImpl(sc, MAX_LOCAL_TASK_FAILURES, isLocal = true)
-        // Use user specified in config, up to all available cores
-        val realCores = Runtime.getRuntime.availableProcessors()
-        val toUseCores = math.min(sc.conf.getInt("spark.cores.max", realCores), realCores)
-        val backend = new LocalBackend(scheduler, toUseCores)
+        val backend = new LocalBackend(scheduler, 1)
         scheduler.initialize(backend)
         scheduler
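For context, a minimal sketch (not part of this commit) of what the restored behavior implies for applications: with spark.cores.max no longer consulted here, a plain "local" master gives a single-core LocalBackend, and multiple local cores must be requested through the master string itself.

    // Illustrative only; the master strings and constructor match the tests in this diff.
    import org.apache.spark.SparkContext

    val singleCore = new SparkContext("local", "example")     // one core after this revert
    singleCore.stop()

    val fourCores = new SparkContext("local[4]", "example")   // explicit core count in the master URL
    fourCores.stop()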
diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index b4a5881cd9..01af940771 100644
--- a/core/src/test/scala/org/apache/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -34,7 +34,7 @@ import org.apache.spark.SparkContext._
 class FileSuite extends FunSuite with LocalSparkContext {
 
   test("text files") {
-    sc = new SparkContext("local[1]", "test")
+    sc = new SparkContext("local", "test")
     val tempDir = Files.createTempDir()
     val outputDir = new File(tempDir, "output").getAbsolutePath
     val nums = sc.makeRDD(1 to 4)
@@ -176,7 +176,7 @@ class FileSuite extends FunSuite with LocalSparkContext {
 
   test("write SequenceFile using new Hadoop API") {
     import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat
-    sc = new SparkContext("local[1]", "test")
+    sc = new SparkContext("local", "test")
     val tempDir = Files.createTempDir()
     val outputDir = new File(tempDir, "output").getAbsolutePath
     val nums = sc.makeRDD(1 to 3).map(x => (new IntWritable(x), new Text("a" * x)))
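The tests above switch back to the plain "local" master, which is again a single-core local run. As a rough, self-contained sketch of the write path the new-Hadoop-API test exercises (mirroring the names in the hunk above; the snippet itself is not part of this diff):

    import java.io.File
    import com.google.common.io.Files
    import org.apache.hadoop.io.{IntWritable, Text}
    import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat
    import org.apache.spark.SparkContext
    import org.apache.spark.SparkContext._   // brings in PairRDDFunctions for saveAsNewAPIHadoopFile

    val sc = new SparkContext("local", "test")
    val tempDir = Files.createTempDir()
    val outputDir = new File(tempDir, "output").getAbsolutePath
    val nums = sc.makeRDD(1 to 3).map(x => (new IntWritable(x), new Text("a" * x)))
    // Write the (IntWritable, Text) pairs as a SequenceFile via the new Hadoop API.
    nums.saveAsNewAPIHadoopFile[SequenceFileOutputFormat[IntWritable, Text]](outputDir)
    sc.stop()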
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
index 9dd42be1d7..b543471a5d 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
@@ -27,10 +27,10 @@ import org.apache.spark.scheduler.local.LocalBackend
 class SparkContextSchedulerCreationSuite
   extends FunSuite with PrivateMethodTester with LocalSparkContext with Logging {
 
-  def createTaskScheduler(master: String, conf: SparkConf = new SparkConf()): TaskSchedulerImpl = {
+  def createTaskScheduler(master: String): TaskSchedulerImpl = {
     // Create local SparkContext to setup a SparkEnv. We don't actually want to start() the
     // real schedulers, so we don't want to create a full SparkContext with the desired scheduler.
-    sc = new SparkContext("local", "test", conf)
+    sc = new SparkContext("local", "test")
     val createTaskSchedulerMethod = PrivateMethod[TaskScheduler]('createTaskScheduler)
     val sched = SparkContext invokePrivate createTaskSchedulerMethod(sc, master)
     sched.asInstanceOf[TaskSchedulerImpl]
@@ -44,26 +44,13 @@ class SparkContextSchedulerCreationSuite
   }
 
   test("local") {
-    var conf = new SparkConf()
-    conf.set("spark.cores.max", "1")
-    val sched = createTaskScheduler("local", conf)
+    val sched = createTaskScheduler("local")
     sched.backend match {
       case s: LocalBackend => assert(s.totalCores === 1)
       case _ => fail()
     }
   }
 
-  test("local-cores-exceed") {
-    val cores = Runtime.getRuntime.availableProcessors() + 1
-    var conf = new SparkConf()
-    conf.set("spark.cores.max", cores.toString)
-    val sched = createTaskScheduler("local", conf)
-    sched.backend match {
-      case s: LocalBackend => assert(s.totalCores === Runtime.getRuntime.availableProcessors())
-      case _ => fail()
-    }
-  }
-
   test("local-n") {
     val sched = createTaskScheduler("local[5]")
     assert(sched.maxTaskFailures === 1)
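Following the suite's pattern above, a hedged sketch of how the remaining local[N] behavior could be asserted (the test name here is hypothetical; only the "local" and "local-n" tests appear in this diff):

    // Hypothetical companion check, written in the style of the tests above:
    // with spark.cores.max no longer consulted, the core count comes solely
    // from the master string, so "local[5]" should produce a 5-core backend.
    test("local-n-cores") {
      val sched = createTaskScheduler("local[5]")
      sched.backend match {
        case s: LocalBackend => assert(s.totalCores === 5)
        case _ => fail()
      }
    }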