diff options
author | Michael Armbrust <michael@databricks.com> | 2014-09-13 16:08:04 -0700 |
---|---|---|
committer | Michael Armbrust <michael@databricks.com> | 2014-09-13 16:08:04 -0700 |
commit | 0f8c4edf4e750e3d11da27cc22c40b0489da7f37 (patch) | |
tree | 7370c6c3634d0e76b74cc3443779bfea6c98f106 /sql/core/src/main | |
parent | 74049249abb952ad061c0e221c22ff894a9e9c8d (diff) | |
download | spark-0f8c4edf4e750e3d11da27cc22c40b0489da7f37.tar.gz spark-0f8c4edf4e750e3d11da27cc22c40b0489da7f37.tar.bz2 spark-0f8c4edf4e750e3d11da27cc22c40b0489da7f37.zip |
[SQL] Decrease partitions when testing
Author: Michael Armbrust <michael@databricks.com>
Closes #2164 from marmbrus/shufflePartitions and squashes the following commits:
0da1e8c [Michael Armbrust] test hax
ef2d985 [Michael Armbrust] more test hacks.
2dabae3 [Michael Armbrust] more test fixes
0bdbf21 [Michael Armbrust] Make parquet tests less order dependent
b42eeab [Michael Armbrust] increase test parallelism
80453d5 [Michael Armbrust] Decrease partitions when testing
Diffstat (limited to 'sql/core/src/main')
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala | 9 |
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
index f2389f8f05..265b67737c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
@@ -18,8 +18,13 @@
 package org.apache.spark.sql.test

 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.sql.SQLContext
+import org.apache.spark.sql.{SQLConf, SQLContext}

 /** A SQLContext that can be used for local testing. */
 object TestSQLContext
-  extends SQLContext(new SparkContext("local", "TestSQLContext", new SparkConf()))
+  extends SQLContext(new SparkContext("local[2]", "TestSQLContext", new SparkConf())) {
+
+  /** Fewer partitions to speed up testing. */
+  override private[spark] def numShufflePartitions: Int =
+    getConf(SQLConf.SHUFFLE_PARTITIONS, "5").toInt
+}