author     Reynold Xin <rxin@databricks.com>    2015-04-08 20:35:29 -0700
committer  Reynold Xin <rxin@databricks.com>    2015-04-08 20:35:29 -0700
commit     1b2aab8d5b9cc2ff702506038bd71aa8debe7ca0 (patch)
tree       df9e868bcc5dbcf7e80c6711f05ccbe6a32a7004 /sql/catalyst
parent     891ada5be1e7fdd796380e2626d80843f2ef6017 (diff)
[SPARK-6765] Fix test code style for SQL
So we can turn the style checker on for test code.

Author: Reynold Xin <rxin@databricks.com>

Closes #5412 from rxin/test-style-sql and squashes the following commits:

9098a31 [Reynold Xin] One more compilation error ...
8c7250a [Reynold Xin] Fix compilation.
82d0944 [Reynold Xin] Indentation.
0b03fbb [Reynold Xin] code review.
f2f4348 [Reynold Xin] oops.
ef4ec48 [Reynold Xin] Hive module.
7e0db5e [Reynold Xin] sql module
04ec7ac [Reynold Xin] catalyst module
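The fixes below are mechanical and follow a few recurring patterns: explicit result types on public test helpers, wrapping of lines longer than 100 characters, braces around multi-line if bodies, and spaces around operators. A minimal before/after sketch of those patterns (the object and method names here are hypothetical, not code from this patch):

object StyleExample {
  // Before (flagged by the checker): no result type, a single line well over
  // 100 characters, and a multi-line if without braces:
  // def describe(xs: Seq[Int]) = if (xs.nonEmpty) s"min=${xs.min}, max=${xs.max}, sum=${xs.sum}" else "empty"

  // After: explicit result type, wrapped body, braced if/else.
  def describe(xs: Seq[Int]): String = {
    if (xs.isEmpty) {
      "empty"
    } else {
      s"min=${xs.min}, max=${xs.max}, sum=${xs.sum}"
    }
  }
}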
Diffstat (limited to 'sql/catalyst')
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/DistributionSuite.scala              |   3
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala         |  10
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala |   8
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala | 134
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala |  51
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala  |   3
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala      |   2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala                 |   5
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SameResultSuite.scala          |   2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala            |   8
10 files changed, 143 insertions(+), 83 deletions(-)
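One hunk in ExpressionEvaluationSuite below keeps a comment block (a three-valued-logic truth table with a long Wikipedia URL) that cannot be wrapped sensibly; rather than reformatting it, the patch brackets it with scalastyle suppression comments. A sketch of that directive pattern (the object and val names are hypothetical; the URL is the one from the patch):

object SuppressionExample {
  // scalastyle:off
  // Everything between the off/on markers is exempt from the style checks, so the
  // long reference line below is allowed to exceed the 100-character limit.
  val threeValuedLogicReference: String =
    "http://en.wikipedia.org/wiki/Null_(SQL)#Comparisons_with_NULL_and_the_three-valued_logic_.283VL.29"
  // scalastyle:on
}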
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/DistributionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/DistributionSuite.scala
index 46b2250aab..ea82cd2622 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/DistributionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/DistributionSuite.scala
@@ -30,7 +30,7 @@ class DistributionSuite extends FunSuite {
inputPartitioning: Partitioning,
requiredDistribution: Distribution,
satisfied: Boolean) {
- if (inputPartitioning.satisfies(requiredDistribution) != satisfied)
+ if (inputPartitioning.satisfies(requiredDistribution) != satisfied) {
fail(
s"""
|== Input Partitioning ==
@@ -40,6 +40,7 @@ class DistributionSuite extends FunSuite {
|== Does input partitioning satisfy required distribution? ==
|Expected $satisfied got ${inputPartitioning.satisfies(requiredDistribution)}
""".stripMargin)
+ }
}
test("HashPartitioning is the output partitioning") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index ee7b14c7a1..6e3d6b9263 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -27,6 +27,8 @@ import org.apache.spark.sql.types._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
+import scala.collection.immutable
+
class AnalysisSuite extends FunSuite with BeforeAndAfter {
val caseSensitiveCatalog = new SimpleCatalog(true)
val caseInsensitiveCatalog = new SimpleCatalog(false)
@@ -41,10 +43,10 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
}
- def caseSensitiveAnalyze(plan: LogicalPlan) =
+ def caseSensitiveAnalyze(plan: LogicalPlan): Unit =
caseSensitiveAnalyzer.checkAnalysis(caseSensitiveAnalyzer(plan))
- def caseInsensitiveAnalyze(plan: LogicalPlan) =
+ def caseInsensitiveAnalyze(plan: LogicalPlan): Unit =
caseInsensitiveAnalyzer.checkAnalysis(caseInsensitiveAnalyzer(plan))
val testRelation = LocalRelation(AttributeReference("a", IntegerType, nullable = true)())
@@ -147,7 +149,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
name: String,
plan: LogicalPlan,
errorMessages: Seq[String],
- caseSensitive: Boolean = true) = {
+ caseSensitive: Boolean = true): Unit = {
test(name) {
val error = intercept[AnalysisException] {
if(caseSensitive) {
@@ -202,7 +204,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
case class UnresolvedTestPlan() extends LeafNode {
override lazy val resolved = false
- override def output = Nil
+ override def output: Seq[Attribute] = Nil
}
errorTest(
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
index 70aef1cac4..fcd745f43c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
@@ -96,7 +96,9 @@ class HiveTypeCoercionSuite extends PlanTest {
widenTest(StringType, TimestampType, None)
// ComplexType
- widenTest(NullType, MapType(IntegerType, StringType, false), Some(MapType(IntegerType, StringType, false)))
+ widenTest(NullType,
+ MapType(IntegerType, StringType, false),
+ Some(MapType(IntegerType, StringType, false)))
widenTest(NullType, StructType(Seq()), Some(StructType(Seq())))
widenTest(StringType, MapType(IntegerType, StringType, true), None)
widenTest(ArrayType(IntegerType), StructType(Seq()), None)
@@ -113,7 +115,9 @@ class HiveTypeCoercionSuite extends PlanTest {
// Remove superflous boolean -> boolean casts.
ruleTest(Cast(Literal(true), BooleanType), Literal(true))
// Stringify boolean when casting to string.
- ruleTest(Cast(Literal(false), StringType), If(Literal(false), Literal("true"), Literal("false")))
+ ruleTest(
+ Cast(Literal(false), StringType),
+ If(Literal(false), Literal("true"), Literal("false")))
}
test("coalesce casts") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
index 3dbefa40d2..d2b1090a0c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -82,10 +82,13 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
assert(BitwiseNot(1.toByte).eval(EmptyRow).isInstanceOf[Byte])
}
+ // scalastyle:off
/**
* Checks for three-valued-logic. Based on:
* http://en.wikipedia.org/wiki/Null_(SQL)#Comparisons_with_NULL_and_the_three-valued_logic_.283VL.29
- * I.e. in flat cpo "False -> Unknown -> True", OR is lowest upper bound, AND is greatest lower bound.
+ * I.e. in flat cpo "False -> Unknown -> True",
+ * OR is lowest upper bound,
+ * AND is greatest lower bound.
* p q p OR q p AND q p = q
* True True True True True
* True False True False False
@@ -102,7 +105,7 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
* False True
* Unknown Unknown
*/
-
+ // scalastyle:on
val notTrueTable =
(true, false) ::
(false, true) ::
@@ -165,7 +168,9 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(In(Literal(1), Seq(Literal(1), Literal(2))), true)
checkEvaluation(In(Literal(2), Seq(Literal(1), Literal(2))), true)
checkEvaluation(In(Literal(3), Seq(Literal(1), Literal(2))), false)
- checkEvaluation(In(Literal(1), Seq(Literal(1), Literal(2))) && In(Literal(2), Seq(Literal(1), Literal(2))), true)
+ checkEvaluation(
+ In(Literal(1), Seq(Literal(1), Literal(2))) && In(Literal(2), Seq(Literal(1), Literal(2))),
+ true)
}
test("Divide") {
@@ -180,7 +185,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(Divide(Literal.create(null, IntegerType), Literal(0)), null)
checkEvaluation(Divide(Literal.create(null, DoubleType), Literal(0.0)), null)
checkEvaluation(Divide(Literal.create(null, IntegerType), Literal(1)), null)
- checkEvaluation(Divide(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null)
+ checkEvaluation(Divide(Literal.create(null, IntegerType), Literal.create(null, IntegerType)),
+ null)
}
test("Remainder") {
@@ -195,7 +201,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(Remainder(Literal.create(null, IntegerType), Literal(0)), null)
checkEvaluation(Remainder(Literal.create(null, DoubleType), Literal(0.0)), null)
checkEvaluation(Remainder(Literal.create(null, IntegerType), Literal(1)), null)
- checkEvaluation(Remainder(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null)
+ checkEvaluation(Remainder(Literal.create(null, IntegerType), Literal.create(null, IntegerType)),
+ null)
}
test("INSET") {
@@ -264,7 +271,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation("ab" like regEx, true, new GenericRow(Array[Any]("a%b")))
checkEvaluation("a\nb" like regEx, true, new GenericRow(Array[Any]("a%b")))
- checkEvaluation(Literal.create(null, StringType) like regEx, null, new GenericRow(Array[Any]("bc%")))
+ checkEvaluation(Literal.create(null, StringType) like regEx, null,
+ new GenericRow(Array[Any]("bc%")))
}
test("RLIKE literal Regular Expression") {
@@ -507,8 +515,10 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
}
test("array casting") {
- val array = Literal.create(Seq("123", "abc", "", null), ArrayType(StringType, containsNull = true))
- val array_notNull = Literal.create(Seq("123", "abc", ""), ArrayType(StringType, containsNull = false))
+ val array = Literal.create(Seq("123", "abc", "", null),
+ ArrayType(StringType, containsNull = true))
+ val array_notNull = Literal.create(Seq("123", "abc", ""),
+ ArrayType(StringType, containsNull = false))
{
val cast = Cast(array, ArrayType(IntegerType, containsNull = true))
@@ -765,7 +775,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(Coalesce(Literal.create(null, StringType) :: Nil), null, row)
checkEvaluation(Coalesce(Literal.create(null, StringType) :: c1 :: c2 :: Nil), "^Ba*n", row)
- checkEvaluation(If(c3, Literal.create("a", StringType), Literal.create("b", StringType)), "a", row)
+ checkEvaluation(
+ If(c3, Literal.create("a", StringType), Literal.create("b", StringType)), "a", row)
checkEvaluation(If(c3, c1, c2), "^Ba*n", row)
checkEvaluation(If(c4, c2, c1), "^Ba*n", row)
checkEvaluation(If(Literal.create(null, BooleanType), c2, c1), "^Ba*n", row)
@@ -842,18 +853,20 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(GetItem(BoundReference(3, typeMap, true),
Literal("aa")), "bb", row)
checkEvaluation(GetItem(Literal.create(null, typeMap), Literal("aa")), null, row)
- checkEvaluation(GetItem(Literal.create(null, typeMap), Literal.create(null, StringType)), null, row)
+ checkEvaluation(
+ GetItem(Literal.create(null, typeMap), Literal.create(null, StringType)), null, row)
checkEvaluation(GetItem(BoundReference(3, typeMap, true),
Literal.create(null, StringType)), null, row)
checkEvaluation(GetItem(BoundReference(4, typeArray, true),
Literal(1)), "bb", row)
checkEvaluation(GetItem(Literal.create(null, typeArray), Literal(1)), null, row)
- checkEvaluation(GetItem(Literal.create(null, typeArray), Literal.create(null, IntegerType)), null, row)
+ checkEvaluation(
+ GetItem(Literal.create(null, typeArray), Literal.create(null, IntegerType)), null, row)
checkEvaluation(GetItem(BoundReference(4, typeArray, true),
Literal.create(null, IntegerType)), null, row)
- def quickBuildGetField(expr: Expression, fieldName: String) = {
+ def quickBuildGetField(expr: Expression, fieldName: String): StructGetField = {
expr.dataType match {
case StructType(fields) =>
val field = fields.find(_.name == fieldName).get
@@ -861,7 +874,9 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
}
}
- def quickResolve(u: UnresolvedGetField) = quickBuildGetField(u.child, u.fieldName)
+ def quickResolve(u: UnresolvedGetField): StructGetField = {
+ quickBuildGetField(u.child, u.fieldName)
+ }
checkEvaluation(quickBuildGetField(BoundReference(2, typeS, nullable = true), "a"), "aa", row)
checkEvaluation(quickBuildGetField(Literal.create(null, typeS), "a"), null, row)
@@ -872,7 +887,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
)
assert(quickBuildGetField(BoundReference(2,typeS, nullable = true), "a").nullable === true)
- assert(quickBuildGetField(BoundReference(2, typeS_notNullable, nullable = false), "a").nullable === false)
+ assert(quickBuildGetField(BoundReference(2, typeS_notNullable, nullable = false), "a").nullable
+ === false)
assert(quickBuildGetField(Literal.create(null, typeS), "a").nullable === true)
assert(quickBuildGetField(Literal.create(null, typeS_notNullable), "a").nullable === true)
@@ -896,7 +912,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(Add(c1, c2), 3, row)
checkEvaluation(Add(c1, Literal.create(null, IntegerType)), null, row)
checkEvaluation(Add(Literal.create(null, IntegerType), c2), null, row)
- checkEvaluation(Add(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
+ checkEvaluation(
+ Add(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(-c1, -1, row)
checkEvaluation(c1 + c2, 3, row)
@@ -919,7 +936,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(Add(c1, c2), 3.1, row)
checkEvaluation(Add(c1, Literal.create(null, DoubleType)), null, row)
checkEvaluation(Add(Literal.create(null, DoubleType), c2), null, row)
- checkEvaluation(Add(Literal.create(null, DoubleType), Literal.create(null, DoubleType)), null, row)
+ checkEvaluation(
+ Add(Literal.create(null, DoubleType), Literal.create(null, DoubleType)), null, row)
checkEvaluation(-c1, -1.1, row)
checkEvaluation(c1 + c2, 3.1, row)
@@ -942,7 +960,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(LessThan(c1, c2), true, row)
checkEvaluation(LessThan(c1, Literal.create(null, IntegerType)), null, row)
checkEvaluation(LessThan(Literal.create(null, IntegerType), c2), null, row)
- checkEvaluation(LessThan(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
+ checkEvaluation(
+ LessThan(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(c1 < c2, true, row)
checkEvaluation(c1 <= c2, true, row)
@@ -985,54 +1004,84 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
val s = 'a.string.at(0)
// substring from zero position with less-than-full length
- checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(2, IntegerType)), "ex", row)
- checkEvaluation(Substring(s, Literal.create(1, IntegerType), Literal.create(2, IntegerType)), "ex", row)
+ checkEvaluation(
+ Substring(s, Literal.create(0, IntegerType), Literal.create(2, IntegerType)), "ex", row)
+ checkEvaluation(
+ Substring(s, Literal.create(1, IntegerType), Literal.create(2, IntegerType)), "ex", row)
// substring from zero position with full length
- checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(7, IntegerType)), "example", row)
- checkEvaluation(Substring(s, Literal.create(1, IntegerType), Literal.create(7, IntegerType)), "example", row)
+ checkEvaluation(
+ Substring(s, Literal.create(0, IntegerType), Literal.create(7, IntegerType)), "example", row)
+ checkEvaluation(
+ Substring(s, Literal.create(1, IntegerType), Literal.create(7, IntegerType)), "example", row)
// substring from zero position with greater-than-full length
- checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(100, IntegerType)), "example", row)
- checkEvaluation(Substring(s, Literal.create(1, IntegerType), Literal.create(100, IntegerType)), "example", row)
+ checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(100, IntegerType)),
+ "example", row)
+ checkEvaluation(Substring(s, Literal.create(1, IntegerType), Literal.create(100, IntegerType)),
+ "example", row)
// substring from nonzero position with less-than-full length
- checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(2, IntegerType)), "xa", row)
+ checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(2, IntegerType)),
+ "xa", row)
// substring from nonzero position with full length
- checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(6, IntegerType)), "xample", row)
+ checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(6, IntegerType)),
+ "xample", row)
// substring from nonzero position with greater-than-full length
- checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(100, IntegerType)), "xample", row)
+ checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(100, IntegerType)),
+ "xample", row)
// zero-length substring (within string bounds)
- checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(0, IntegerType)), "", row)
+ checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(0, IntegerType)),
+ "", row)
// zero-length substring (beyond string bounds)
- checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(4, IntegerType)), "", row)
+ checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(4, IntegerType)),
+ "", row)
// substring(null, _, _) -> null
- checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(4, IntegerType)), null, new GenericRow(Array[Any](null)))
+ checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(4, IntegerType)),
+ null, new GenericRow(Array[Any](null)))
// substring(_, null, _) -> null
- checkEvaluation(Substring(s, Literal.create(null, IntegerType), Literal.create(4, IntegerType)), null, row)
+ checkEvaluation(Substring(s, Literal.create(null, IntegerType), Literal.create(4, IntegerType)),
+ null, row)
// substring(_, _, null) -> null
- checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(null, IntegerType)), null, row)
+ checkEvaluation(
+ Substring(s, Literal.create(100, IntegerType), Literal.create(null, IntegerType)),
+ null,
+ row)
// 2-arg substring from zero position
- checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)), "example", row)
- checkEvaluation(Substring(s, Literal.create(1, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)), "example", row)
+ checkEvaluation(
+ Substring(s, Literal.create(0, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)),
+ "example",
+ row)
+ checkEvaluation(
+ Substring(s, Literal.create(1, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)),
+ "example",
+ row)
// 2-arg substring from nonzero position
- checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)), "xample", row)
+ checkEvaluation(
+ Substring(s, Literal.create(2, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)),
+ "xample",
+ row)
val s_notNull = 'a.string.notNull.at(0)
- assert(Substring(s, Literal.create(0, IntegerType), Literal.create(2, IntegerType)).nullable === true)
- assert(Substring(s_notNull, Literal.create(0, IntegerType), Literal.create(2, IntegerType)).nullable === false)
- assert(Substring(s_notNull, Literal.create(null, IntegerType), Literal.create(2, IntegerType)).nullable === true)
- assert(Substring(s_notNull, Literal.create(0, IntegerType), Literal.create(null, IntegerType)).nullable === true)
+ assert(Substring(s, Literal.create(0, IntegerType), Literal.create(2, IntegerType)).nullable
+ === true)
+ assert(
+ Substring(s_notNull, Literal.create(0, IntegerType), Literal.create(2, IntegerType)).nullable
+ === false)
+ assert(Substring(s_notNull,
+ Literal.create(null, IntegerType), Literal.create(2, IntegerType)).nullable === true)
+ assert(Substring(s_notNull,
+ Literal.create(0, IntegerType), Literal.create(null, IntegerType)).nullable === true)
checkEvaluation(s.substr(0, 2), "ex", row)
checkEvaluation(s.substr(0), "example", row)
@@ -1065,17 +1114,20 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(BitwiseAnd(c1, c4), null, row)
checkEvaluation(BitwiseAnd(c1, c2), 0, row)
checkEvaluation(BitwiseAnd(c1, Literal.create(null, IntegerType)), null, row)
- checkEvaluation(BitwiseAnd(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
+ checkEvaluation(
+ BitwiseAnd(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(BitwiseOr(c1, c4), null, row)
checkEvaluation(BitwiseOr(c1, c2), 3, row)
checkEvaluation(BitwiseOr(c1, Literal.create(null, IntegerType)), null, row)
- checkEvaluation(BitwiseOr(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
+ checkEvaluation(
+ BitwiseOr(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(BitwiseXor(c1, c4), null, row)
checkEvaluation(BitwiseXor(c1, c2), 3, row)
checkEvaluation(BitwiseXor(c1, Literal.create(null, IntegerType)), null, row)
- checkEvaluation(BitwiseXor(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
+ checkEvaluation(
+ BitwiseXor(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(BitwiseNot(c4), null, row)
checkEvaluation(BitwiseNot(c1), -2, row)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
index a0efe9e2e7..4396bd0dda 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
@@ -176,40 +176,39 @@ class ConstantFoldingSuite extends PlanTest {
}
test("Constant folding test: expressions have null literals") {
- val originalQuery =
- testRelation
- .select(
- IsNull(Literal(null)) as 'c1,
- IsNotNull(Literal(null)) as 'c2,
+ val originalQuery = testRelation.select(
+ IsNull(Literal(null)) as 'c1,
+ IsNotNull(Literal(null)) as 'c2,
- GetItem(Literal.create(null, ArrayType(IntegerType)), 1) as 'c3,
- GetItem(Literal.create(Seq(1), ArrayType(IntegerType)), Literal.create(null, IntegerType)) as 'c4,
- UnresolvedGetField(
- Literal.create(null, StructType(Seq(StructField("a", IntegerType, true)))),
- "a") as 'c5,
+ GetItem(Literal.create(null, ArrayType(IntegerType)), 1) as 'c3,
+ GetItem(
+ Literal.create(Seq(1), ArrayType(IntegerType)), Literal.create(null, IntegerType)) as 'c4,
+ UnresolvedGetField(
+ Literal.create(null, StructType(Seq(StructField("a", IntegerType, true)))),
+ "a") as 'c5,
- UnaryMinus(Literal.create(null, IntegerType)) as 'c6,
- Cast(Literal(null), IntegerType) as 'c7,
- Not(Literal.create(null, BooleanType)) as 'c8,
+ UnaryMinus(Literal.create(null, IntegerType)) as 'c6,
+ Cast(Literal(null), IntegerType) as 'c7,
+ Not(Literal.create(null, BooleanType)) as 'c8,
- Add(Literal.create(null, IntegerType), 1) as 'c9,
- Add(1, Literal.create(null, IntegerType)) as 'c10,
+ Add(Literal.create(null, IntegerType), 1) as 'c9,
+ Add(1, Literal.create(null, IntegerType)) as 'c10,
- EqualTo(Literal.create(null, IntegerType), 1) as 'c11,
- EqualTo(1, Literal.create(null, IntegerType)) as 'c12,
+ EqualTo(Literal.create(null, IntegerType), 1) as 'c11,
+ EqualTo(1, Literal.create(null, IntegerType)) as 'c12,
- Like(Literal.create(null, StringType), "abc") as 'c13,
- Like("abc", Literal.create(null, StringType)) as 'c14,
+ Like(Literal.create(null, StringType), "abc") as 'c13,
+ Like("abc", Literal.create(null, StringType)) as 'c14,
- Upper(Literal.create(null, StringType)) as 'c15,
+ Upper(Literal.create(null, StringType)) as 'c15,
- Substring(Literal.create(null, StringType), 0, 1) as 'c16,
- Substring("abc", Literal.create(null, IntegerType), 1) as 'c17,
- Substring("abc", 0, Literal.create(null, IntegerType)) as 'c18,
+ Substring(Literal.create(null, StringType), 0, 1) as 'c16,
+ Substring("abc", Literal.create(null, IntegerType), 1) as 'c17,
+ Substring("abc", 0, Literal.create(null, IntegerType)) as 'c18,
- Contains(Literal.create(null, StringType), "abc") as 'c19,
- Contains("abc", Literal.create(null, StringType)) as 'c20
- )
+ Contains(Literal.create(null, StringType), "abc") as 'c19,
+ Contains("abc", Literal.create(null, StringType)) as 'c20
+ )
val optimized = Optimize(originalQuery.analyze)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
index 55c6766520..1448098c77 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
@@ -432,7 +432,8 @@ class FilterPushdownSuite extends PlanTest {
val originalQuery = {
z.join(x.join(y))
- .where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) && ("z.a".attr >= 3) && ("z.a".attr === "x.b".attr))
+ .where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) &&
+ ("z.a".attr >= 3) && ("z.a".attr === "x.b".attr))
}
val optimized = Optimize(originalQuery.analyze)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
index 233e329cb2..966bc9ada1 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
@@ -52,7 +52,7 @@ class OptimizeInSuite extends PlanTest {
val optimized = Optimize(originalQuery.analyze)
val correctAnswer =
testRelation
- .where(InSet(UnresolvedAttribute("a"), HashSet[Any]()+1+2))
+ .where(InSet(UnresolvedAttribute("a"), HashSet[Any]() + 1 + 2))
.analyze
comparePlans(optimized, correctAnswer)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
index 129d091ca0..e7cafcc96d 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
@@ -45,12 +45,13 @@ class PlanTest extends FunSuite {
protected def comparePlans(plan1: LogicalPlan, plan2: LogicalPlan) {
val normalized1 = normalizeExprIds(plan1)
val normalized2 = normalizeExprIds(plan2)
- if (normalized1 != normalized2)
+ if (normalized1 != normalized2) {
fail(
s"""
|== FAIL: Plans do not match ===
|${sideBySide(normalized1.treeString, normalized2.treeString).mkString("\n")}
- """.stripMargin)
+ """.stripMargin)
+ }
}
/** Fails the test if the two expressions do not match */
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SameResultSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SameResultSuite.scala
index 11e6831b24..1273921f63 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SameResultSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SameResultSuite.scala
@@ -32,7 +32,7 @@ class SameResultSuite extends FunSuite {
val testRelation = LocalRelation('a.int, 'b.int, 'c.int)
val testRelation2 = LocalRelation('a.int, 'b.int, 'c.int)
- def assertSameResult(a: LogicalPlan, b: LogicalPlan, result: Boolean = true) = {
+ def assertSameResult(a: LogicalPlan, b: LogicalPlan, result: Boolean = true): Unit = {
val aAnalyzed = a.analyze
val bAnalyzed = b.analyze
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
index 274f3ede00..4eb8708335 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
@@ -25,12 +25,12 @@ import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types.{StringType, NullType}
case class Dummy(optKey: Option[Expression]) extends Expression {
- def children = optKey.toSeq
- def nullable = true
- def dataType = NullType
+ def children: Seq[Expression] = optKey.toSeq
+ def nullable: Boolean = true
+ def dataType: NullType = NullType
override lazy val resolved = true
type EvaluatedType = Any
- def eval(input: Row) = null.asInstanceOf[Any]
+ def eval(input: Row): Any = null.asInstanceOf[Any]
}
class TreeNodeSuite extends FunSuite {