author     Josh Rosen <joshrosen@databricks.com>  2015-12-05 08:15:30 +0800
committer  Reynold Xin <rxin@databricks.com>      2015-12-05 08:15:30 +0800
commit     b7204e1d41271d2e8443484371770936664350b1 (patch)
tree       3b09d003dce3b482282e3ae21b893fe57e607128 /sql
parent     d64806b37373c5cc4fd158a9f5005743bd00bf28 (diff)
[SPARK-12112][BUILD] Upgrade to SBT 0.13.9
We should upgrade to SBT 0.13.9, since this is a requirement in order to use SBT's new Maven-style resolution features (which will be done in a separate patch, because it's blocked by some binary compatibility issues in the POM reader plugin).

I also upgraded Scalastyle to version 0.8.0, which was necessary in order to fix a Scala 2.10.5 compatibility issue (see https://github.com/scalastyle/scalastyle/issues/156). The newer Scalastyle is slightly stricter about whitespace surrounding tokens, so I fixed the new style violations.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #10112 from JoshRosen/upgrade-to-sbt-0.13.9.
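For context, the SBT version bump itself is not visible in this sql/-limited view: SBT reads its build version from project/build.properties, so that part of the upgrade reduces to setting sbt.version=0.13.9 there. The sql/ diff below contains only the resulting style cleanups. As an illustration of the kind of violation the stricter Scalastyle 0.8.0 whitespace checks flag (the pattern is drawn from the hunks below):

    // Flagged by the stricter check: no space after the -> token.
    Map("a" ->"123")
    // Clean: a single space on both sides of ->.
    Map("a" -> "123")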
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala                    | 2 +-
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala         | 8 ++++----
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala                    | 2 +-
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/RunLengthEncodingSuite.scala | 4 ++--
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala                      | 2 +-
5 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
index ab77a76448..a98e16c253 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
@@ -734,7 +734,7 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
     val complex = Literal.create(
       Row(
         Seq("123", "true", "f"),
-        Map("a" ->"123", "b" -> "true", "c" -> "f"),
+        Map("a" -> "123", "b" -> "true", "c" -> "f"),
         Row(0)),
       StructType(Seq(
         StructField("a",
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala
index fdd745f48e..bb3e278697 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala
@@ -862,9 +862,9 @@ private[sql] object ParquetRelation extends Logging {

   // The parquet compression short names
   val shortParquetCompressionCodecNames = Map(
-    "NONE"         -> CompressionCodecName.UNCOMPRESSED,
+    "NONE" -> CompressionCodecName.UNCOMPRESSED,
     "UNCOMPRESSED" -> CompressionCodecName.UNCOMPRESSED,
-    "SNAPPY"       -> CompressionCodecName.SNAPPY,
-    "GZIP"         -> CompressionCodecName.GZIP,
-    "LZO"          -> CompressionCodecName.LZO)
+    "SNAPPY" -> CompressionCodecName.SNAPPY,
+    "GZIP" -> CompressionCodecName.GZIP,
+    "LZO" -> CompressionCodecName.LZO)
 }
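The map above resolves user-facing short codec names to Parquet's CompressionCodecName enum. A minimal, hypothetical lookup sketch (the variable names, the uppercase normalization, and the SNAPPY fallback are assumptions for illustration, not part of this diff):

    // Hypothetical: resolve a user-supplied short name, falling back to SNAPPY.
    val requested = "gzip"
    val codec = ParquetRelation.shortParquetCompressionCodecNames
      .getOrElse(requested.toUpperCase, CompressionCodecName.SNAPPY)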
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala
index 34dd96929e..706ff1f998 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala
@@ -35,7 +35,7 @@ class ColumnTypeSuite extends SparkFunSuite with Logging {

   test("defaultSize") {
     val checks = Map(
-      NULL-> 0, BOOLEAN -> 1, BYTE -> 1, SHORT -> 2, INT -> 4, LONG -> 8,
+      NULL -> 0, BOOLEAN -> 1, BYTE -> 1, SHORT -> 2, INT -> 4, LONG -> 8,
       FLOAT -> 4, DOUBLE -> 8, COMPACT_DECIMAL(15, 10) -> 8, LARGE_DECIMAL(20, 10) -> 12,
       STRING -> 8, BINARY -> 16, STRUCT_TYPE -> 20, ARRAY_TYPE -> 16, MAP_TYPE -> 32)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/RunLengthEncodingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/RunLengthEncodingSuite.scala
index ce3affba55..95642e93ae 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/RunLengthEncodingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/RunLengthEncodingSuite.scala
@@ -100,11 +100,11 @@ class RunLengthEncodingSuite extends SparkFunSuite {
    }

    test(s"$RunLengthEncoding with $typeName: simple case") {
-      skeleton(2, Seq(0 -> 2, 1 ->2))
+      skeleton(2, Seq(0 -> 2, 1 -> 2))
    }

    test(s"$RunLengthEncoding with $typeName: run length == 1") {
-      skeleton(2, Seq(0 -> 1, 1 ->1))
+      skeleton(2, Seq(0 -> 1, 1 -> 1))
    }

    test(s"$RunLengthEncoding with $typeName: single long run") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
index 4f2cad19bf..4339f7260d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
@@ -116,7 +116,7 @@ class SQLMetricsSuite extends SparkFunSuite with SharedSQLContext {
     // PhysicalRDD(nodeId = 1) -> Project(nodeId = 0)
     val df = person.select('name)
     testSparkPlanMetrics(df, 1, Map(
-      0L ->("Project", Map(
+      0L -> ("Project", Map(
         "number of rows" -> 2L)))
     )
   }