author Reynold Xin <rxin@databricks.com> 2015-05-28 20:11:57 -0700
committer Reynold Xin <rxin@databricks.com> 2015-05-28 20:11:57 -0700
commit 8da560d7de9b3c9a3e3ff197eeb10a3d7023f10d (patch)
tree 6cd652c27cdb536c60cdddd816a6a1013fb5f17c
parent 2881d14cbedc14f1cd8ae5078446dba1a8d39086 (diff)
download spark-8da560d7de9b3c9a3e3ff197eeb10a3d7023f10d.tar.gz
download spark-8da560d7de9b3c9a3e3ff197eeb10a3d7023f10d.tar.bz2
download spark-8da560d7de9b3c9a3e3ff197eeb10a3d7023f10d.zip
[SPARK-7927] whitespace fixes for Catalyst module.
So we can enable a whitespace enforcement rule in the style checker to save code review time.

Author: Reynold Xin <rxin@databricks.com>

Closes #6476 from rxin/whitespace-catalyst and squashes the following commits:

650409d [Reynold Xin] Fixed tests.
51a9e5d [Reynold Xin] [SPARK-7927] whitespace fixes for Catalyst module.
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala | 2
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala | 8
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala | 9
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala | 2
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala | 5
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala | 2
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala | 7
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala | 84
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala | 2
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala | 4
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala | 4
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala | 2
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala | 4
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala | 4
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala | 2
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala | 4
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala | 2
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala | 4
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala | 2
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala | 2
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala | 2
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala | 4
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala | 2
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala | 56
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala | 6
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala | 2
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala | 4
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala | 2
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala | 6
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala | 4
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala | 4
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala | 4
32 files changed, 121 insertions, 130 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
index 2eb3e167ba..ef7b3ad943 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
@@ -103,7 +103,7 @@ class SqlLexical extends StdLexical {
( identChar ~ (identChar | digit).* ^^
{ case first ~ rest => processIdent((first :: rest).mkString) }
| rep1(digit) ~ ('.' ~> digit.*).? ^^ {
- case i ~ None    => NumericLit(i.mkString)
+ case i ~ None => NumericLit(i.mkString)
case i ~ Some(d) => FloatLit(i.mkString + "." + d.mkString)
}
| '\'' ~> chrExcept('\'', '\n', EofCh).* <~ '\'' ^^
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
index fc36b9f1f2..e85312aee7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
@@ -140,7 +140,7 @@ class SqlParser extends AbstractSparkSQLParser with DataTypeParser {
(HAVING ~> expression).? ~
sortType.? ~
(LIMIT ~> expression).? ^^ {
- case d ~ p ~ r ~ f ~ g ~ h ~ o ~ l =>
+ case d ~ p ~ r ~ f ~ g ~ h ~ o ~ l =>
val base = r.getOrElse(OneRowRelation)
val withFilter = f.map(Filter(_, base)).getOrElse(base)
val withProjection = g
@@ -212,7 +212,7 @@ class SqlParser extends AbstractSparkSQLParser with DataTypeParser {
protected lazy val ordering: Parser[Seq[SortOrder]] =
( rep1sep(expression ~ direction.? , ",") ^^ {
- case exps => exps.map(pair => SortOrder(pair._1, pair._2.getOrElse(Ascending)))
+ case exps => exps.map(pair => SortOrder(pair._1, pair._2.getOrElse(Ascending)))
}
)
@@ -242,7 +242,7 @@ class SqlParser extends AbstractSparkSQLParser with DataTypeParser {
| termExpression ~ NOT.? ~ (BETWEEN ~> termExpression) ~ (AND ~> termExpression) ^^ {
case e ~ not ~ el ~ eu =>
val betweenExpr: Expression = And(GreaterThanOrEqual(e, el), LessThanOrEqual(e, eu))
- not.fold(betweenExpr)(f=> Not(betweenExpr))
+ not.fold(betweenExpr)(f => Not(betweenExpr))
}
| termExpression ~ (RLIKE ~> termExpression) ^^ { case e1 ~ e2 => RLike(e1, e2) }
| termExpression ~ (REGEXP ~> termExpression) ^^ { case e1 ~ e2 => RLike(e1, e2) }
@@ -365,7 +365,7 @@ class SqlParser extends AbstractSparkSQLParser with DataTypeParser {
protected lazy val baseExpression: Parser[Expression] =
( "*" ^^^ UnresolvedStar(None)
- | ident <~ "." ~ "*" ^^ { case tableName => UnresolvedStar(Option(tableName)) }
+ | ident <~ "." ~ "*" ^^ { case tableName => UnresolvedStar(Option(tableName)) }
| primary
)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index c239e83271..df37889eed 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -494,7 +494,7 @@ class Analyzer(
def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
case filter @ Filter(havingCondition, aggregate @ Aggregate(_, originalAggExprs, _))
if aggregate.resolved && containsAggregate(havingCondition) => {
- val evaluatedCondition = Alias(havingCondition,  "havingCondition")()
+ val evaluatedCondition = Alias(havingCondition, "havingCondition")()
val aggExprsWithHaving = evaluatedCondition +: originalAggExprs
Project(aggregate.output,
@@ -515,16 +515,15 @@ class Analyzer(
* - concrete attribute references for their output.
* - to be relocated from a SELECT clause (i.e. from a [[Project]]) into a [[Generate]]).
*
- * Names for the output [[Attributes]] are extracted from [[Alias]] or [[MultiAlias]] expressions
+ * Names for the output [[Attribute]]s are extracted from [[Alias]] or [[MultiAlias]] expressions
* that wrap the [[Generator]]. If more than one [[Generator]] is found in a Project, an
* [[AnalysisException]] is throw.
*/
object ResolveGenerate extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
case p: Generate if !p.child.resolved || !p.generator.resolved => p
- case g: Generate if g.resolved == false =>
- g.copy(
- generatorOutput = makeGeneratorOutput(g.generator, g.generatorOutput.map(_.name)))
+ case g: Generate if !g.resolved =>
+ g.copy(generatorOutput = makeGeneratorOutput(g.generator, g.generatorOutput.map(_.name)))
case p @ Project(projectList, child) =>
// Holds the resolved generator, if one exists in the project list.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
index 208021c421..3e240fd55e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
@@ -140,7 +140,7 @@ class SimpleCatalog(val conf: CatalystConf) extends Catalog {
trait OverrideCatalog extends Catalog {
// TODO: This doesn't work when the database changes...
- val overrides = new mutable.HashMap[(Option[String],String), LogicalPlan]()
+ val overrides = new mutable.HashMap[(Option[String], String), LogicalPlan]()
abstract override def tableExists(tableIdentifier: Seq[String]): Boolean = {
val tableIdent = processTableIdentifier(tableIdentifier)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
index b45b17d856..44664f898f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
@@ -561,8 +561,7 @@ trait HiveTypeCoercion {
case a @ CreateArray(children) if !a.resolved =>
val commonType = a.childTypes.reduce(
- (a,b) =>
- findTightestCommonType(a,b).getOrElse(StringType))
+ (a, b) => findTightestCommonType(a, b).getOrElse(StringType))
CreateArray(
children.map(c => if (c.dataType == commonType) c else Cast(c, commonType)))
@@ -634,7 +633,7 @@ trait HiveTypeCoercion {
import HiveTypeCoercion._
def apply(plan: LogicalPlan): LogicalPlan = plan transformAllExpressions {
- case cw: CaseWhenLike if !cw.resolved && cw.childrenResolved && !cw.valueTypesEqual =>
+ case cw: CaseWhenLike if !cw.resolved && cw.childrenResolved && !cw.valueTypesEqual =>
logDebug(s"Input values for null casting ${cw.valueTypes.mkString(",")}")
val commonType = cw.valueTypes.reduce { (v1, v2) =>
findTightestCommonType(v1, v2).getOrElse(sys.error(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
index 60ab9fba48..5182175796 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
@@ -140,7 +140,7 @@ package object dsl {
// Note that if we make ExpressionConversions an object rather than a trait, we can
// then make this a value class to avoid the small penalty of runtime instantiation.
def $(args: Any*): analysis.UnresolvedAttribute = {
- analysis.UnresolvedAttribute(sc.s(args :_*))
+ analysis.UnresolvedAttribute(sc.s(args : _*))
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala
index 0fd4f9b374..d2a90a50c8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala
@@ -49,11 +49,4 @@ package object errors {
case e: Exception => throw new TreeNodeException(tree, msg, e)
}
}
-
- /**
- * Executes `f` which is expected to throw a
- * [[catalyst.errors.TreeNodeException TreeNodeException]]. The first tree encountered in
- * the stack of exceptions of type `TreeType` is returned.
- */
- def getTree[TreeType <: TreeNode[_]](f: => Unit): TreeType = ??? // TODO: Implement
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index df3cdf2cdf..21adac1441 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -35,48 +35,48 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression w
private[this] def forceNullable(from: DataType, to: DataType) = (from, to) match {
case (StringType, _: NumericType) => true
- case (StringType, TimestampType)  => true
- case (DoubleType, TimestampType)  => true
- case (FloatType, TimestampType)   => true
- case (StringType, DateType)       => true
- case (_: NumericType, DateType)   => true
- case (BooleanType, DateType)      => true
- case (DateType, _: NumericType)   => true
- case (DateType, BooleanType)      => true
+ case (StringType, TimestampType) => true
+ case (DoubleType, TimestampType) => true
+ case (FloatType, TimestampType) => true
+ case (StringType, DateType) => true
+ case (_: NumericType, DateType) => true
+ case (BooleanType, DateType) => true
+ case (DateType, _: NumericType) => true
+ case (DateType, BooleanType) => true
case (DoubleType, _: DecimalType) => true
- case (FloatType, _: DecimalType)  => true
+ case (FloatType, _: DecimalType) => true
case (_, DecimalType.Fixed(_, _)) => true // TODO: not all upcasts here can really give null
- case _                            => false
+ case _ => false
}
private[this] def resolvableNullability(from: Boolean, to: Boolean) = !from || to
private[this] def resolve(from: DataType, to: DataType): Boolean = {
(from, to) match {
- case (from, to) if from == to         => true
+ case (from, to) if from == to => true
- case (NullType, _)                    => true
+ case (NullType, _) => true
- case (_, StringType)                  => true
+ case (_, StringType) => true
- case (StringType, BinaryType)         => true
+ case (StringType, BinaryType) => true
- case (StringType, BooleanType)        => true
- case (DateType, BooleanType)          => true
- case (TimestampType, BooleanType)     => true
- case (_: NumericType, BooleanType)    => true
+ case (StringType, BooleanType) => true
+ case (DateType, BooleanType) => true
+ case (TimestampType, BooleanType) => true
+ case (_: NumericType, BooleanType) => true
- case (StringType, TimestampType)      => true
- case (BooleanType, TimestampType)     => true
- case (DateType, TimestampType)        => true
- case (_: NumericType, TimestampType)  => true
+ case (StringType, TimestampType) => true
+ case (BooleanType, TimestampType) => true
+ case (DateType, TimestampType) => true
+ case (_: NumericType, TimestampType) => true
- case (_, DateType)                    => true
+ case (_, DateType) => true
- case (StringType, _: NumericType)     => true
- case (BooleanType, _: NumericType)    => true
- case (DateType, _: NumericType)       => true
- case (TimestampType, _: NumericType)  => true
+ case (StringType, _: NumericType) => true
+ case (BooleanType, _: NumericType) => true
+ case (DateType, _: NumericType) => true
+ case (TimestampType, _: NumericType) => true
case (_: NumericType, _: NumericType) => true
case (ArrayType(from, fn), ArrayType(to, tn)) =>
@@ -410,21 +410,21 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression w
private[this] def cast(from: DataType, to: DataType): Any => Any = to match {
case dt if dt == child.dataType => identity[Any]
- case StringType                 => castToString(from)
- case BinaryType                 => castToBinary(from)
- case DateType                   => castToDate(from)
- case decimal: DecimalType       => castToDecimal(from, decimal)
- case TimestampType              => castToTimestamp(from)
- case BooleanType                => castToBoolean(from)
- case ByteType                   => castToByte(from)
- case ShortType                  => castToShort(from)
- case IntegerType                => castToInt(from)
- case FloatType                  => castToFloat(from)
- case LongType                   => castToLong(from)
- case DoubleType                 => castToDouble(from)
- case array: ArrayType           => castArray(from.asInstanceOf[ArrayType], array)
- case map: MapType               => castMap(from.asInstanceOf[MapType], map)
- case struct: StructType         => castStruct(from.asInstanceOf[StructType], struct)
+ case StringType => castToString(from)
+ case BinaryType => castToBinary(from)
+ case DateType => castToDate(from)
+ case decimal: DecimalType => castToDecimal(from, decimal)
+ case TimestampType => castToTimestamp(from)
+ case BooleanType => castToBoolean(from)
+ case ByteType => castToByte(from)
+ case ShortType => castToShort(from)
+ case IntegerType => castToInt(from)
+ case FloatType => castToFloat(from)
+ case LongType => castToLong(from)
+ case DoubleType => castToDouble(from)
+ case array: ArrayType => castArray(from.asInstanceOf[ArrayType], array)
+ case map: MapType => castMap(from.asInstanceOf[MapType], map)
+ case struct: StructType => castStruct(from.asInstanceOf[StructType], struct)
}
private[this] lazy val cast: Any => Any = cast(child.dataType, dataType)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala
index b5f4e16745..a1e0819e8a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala
@@ -47,7 +47,7 @@ object ExtractValue {
case (ArrayType(StructType(fields), containsNull), Literal(fieldName, StringType)) =>
val ordinal = findField(fields, fieldName.toString, resolver)
GetArrayStructFields(child, fields(ordinal), ordinal, containsNull)
- case (_: ArrayType, _) if extraction.dataType.isInstanceOf[IntegralType] =>
+ case (_: ArrayType, _) if extraction.dataType.isInstanceOf[IntegralType] =>
GetArrayItem(child, extraction)
case (_: MapType, _) =>
GetMapValue(child, extraction)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
index 72eff5fe96..6c380d3084 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
@@ -111,7 +111,7 @@ case class MinFunction(expr: Expression, base: AggregateExpression) extends Aggr
override def update(input: Row): Unit = {
if (currentMin.value == null) {
currentMin.value = expr.eval(input)
- } else if(cmp.eval(input) == true) {
+ } else if (cmp.eval(input) == true) {
currentMin.value = expr.eval(input)
}
}
@@ -142,7 +142,7 @@ case class MaxFunction(expr: Expression, base: AggregateExpression) extends Aggr
override def update(input: Row): Unit = {
if (currentMax.value == null) {
currentMax.value = expr.eval(input)
- } else if(cmp.eval(input) == true) {
+ } else if (cmp.eval(input) == true) {
currentMax.value = expr.eval(input)
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index ecb4c4b68f..36964af68d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -373,7 +373,7 @@ abstract class CodeGenerator[InType <: AnyRef, OutType <: AnyRef] extends Loggin
// Uh, bad function name...
child.castOrNull(c => q"!$c", BooleanType)
- case Add(e1, e2)      => (e1, e2) evaluate { case (eval1, eval2) => q"$eval1 + $eval2" }
+ case Add(e1, e2) => (e1, e2) evaluate { case (eval1, eval2) => q"$eval1 + $eval2" }
case Subtract(e1, e2) => (e1, e2) evaluate { case (eval1, eval2) => q"$eval1 - $eval2" }
case Multiply(e1, e2) => (e1, e2) evaluate { case (eval1, eval2) => q"$eval1 * $eval2" }
case Divide(e1, e2) =>
@@ -665,7 +665,7 @@ abstract class CodeGenerator[InType <: AnyRef, OutType <: AnyRef] extends Loggin
protected def defaultPrimitive(dt: DataType) = dt match {
case BooleanType => ru.Literal(Constant(false))
case FloatType => ru.Literal(Constant(-1.0.toFloat))
- case StringType => q"""org.apache.spark.sql.types.UTF8String("<uninit>")"""
+ case StringType => q"""org.apache.spark.sql.types.UTF8String("<uninit>")"""
case ShortType => ru.Literal(Constant(-1.toShort))
case LongType => ru.Literal(Constant(-1L))
case ByteType => ru.Literal(Constant(-1.toByte))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
index 584f938445..31c63a79eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
@@ -161,7 +161,7 @@ object GenerateProjection extends CodeGenerator[Seq[Expression], Projection] {
}
}
- val hashValues = expressions.zipWithIndex.map { case (e,i) =>
+ val hashValues = expressions.zipWithIndex.map { case (e, i) =>
val elementName = newTermName(s"c$i")
val nonNull = e.dataType match {
case BooleanType => q"if ($elementName) 0 else 1"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
index cab40feb72..634138010f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
@@ -103,8 +103,8 @@ case class Explode(child: Expression)
val inputArray = child.eval(input).asInstanceOf[Seq[Any]]
if (inputArray == null) Nil else inputArray.map(v => new GenericRow(Array(v)))
case MapType(_, _, _) =>
- val inputMap = child.eval(input).asInstanceOf[Map[Any,Any]]
- if (inputMap == null) Nil else inputMap.map { case (k,v) => new GenericRow(Array(k,v)) }
+ val inputMap = child.eval(input).asInstanceOf[Map[Any, Any]]
+ if (inputMap == null) Nil else inputMap.map { case (k, v) => new GenericRow(Array(k, v)) }
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
index 5da93fe9c6..83a44a12f0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
@@ -38,14 +38,14 @@ trait StringRegexExpression extends ExpectsInputTypes {
case _ => null
}
- protected def compile(str: String): Pattern = if(str == null) {
+ protected def compile(str: String): Pattern = if (str == null) {
null
} else {
// Let it raise exception if couldn't compile the regex string
Pattern.compile(escape(str))
}
- protected def pattern(str: String) = if(cache == null) compile(str) else cache
+ protected def pattern(str: String) = if (cache == null) compile(str) else cache
override def eval(input: Row): Any = {
val l = left.eval(input)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
index 2729b34a08..82c4d462cc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
@@ -66,7 +66,7 @@ case class WindowSpecDefinition(
}
}
- override def children: Seq[Expression] = partitionSpec ++ orderSpec
+ override def children: Seq[Expression] = partitionSpec ++ orderSpec
override lazy val resolved: Boolean =
childrenResolved && frameSpecification.isInstanceOf[SpecifiedWindowFrame]
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
index 7967189cac..eff5c61644 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
@@ -84,7 +84,7 @@ abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanTy
val newArgs = productIterator.map {
case e: Expression => transformExpressionDown(e)
case Some(e: Expression) => Some(transformExpressionDown(e))
- case m: Map[_,_] => m
+ case m: Map[_, _] => m
case d: DataType => d // Avoid unpacking Structs
case seq: Traversable[_] => seq.map {
case e: Expression => transformExpressionDown(e)
@@ -117,7 +117,7 @@ abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanTy
val newArgs = productIterator.map {
case e: Expression => transformExpressionUp(e)
case Some(e: Expression) => Some(transformExpressionUp(e))
- case m: Map[_,_] => m
+ case m: Map[_, _] => m
case d: DataType => d // Avoid unpacking Structs
case seq: Traversable[_] => seq.map {
case e: Expression => transformExpressionUp(e)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
index 01f4b6e9bb..33a9e55a47 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
@@ -93,7 +93,7 @@ case class Union(left: LogicalPlan, right: LogicalPlan) extends BinaryNode {
override lazy val resolved: Boolean =
childrenResolved &&
- left.output.zip(right.output).forall { case (l,r) => l.dataType == r.dataType }
+ left.output.zip(right.output).forall { case (l, r) => l.dataType == r.dataType }
override def statistics: Statistics = {
val sizeInBytes = left.statistics.sizeInBytes + right.statistics.sizeInBytes
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index 28e15566f0..36d005d0e1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -254,7 +254,7 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] {
} else {
Some(arg)
}
- case m: Map[_,_] => m
+ case m: Map[_, _] => m
case d: DataType => d // Avoid unpacking Structs
case args: Traversable[_] => args.map {
case arg: TreeNode[_] if children contains arg =>
@@ -311,7 +311,7 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] {
} else {
Some(arg)
}
- case m: Map[_,_] => m
+ case m: Map[_, _] => m
case d: DataType => d // Avoid unpacking Structs
case args: Traversable[_] => args.map {
case arg: TreeNode[_] if children contains arg =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala
index 9d613a940e..07054166a5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala
@@ -83,7 +83,7 @@ package object util {
}
def resourceToString(
- resource:String,
+ resource: String,
encoding: String = "UTF-8",
classLoader: ClassLoader = Utils.getSparkClassLoader): String = {
new String(resourceToBytes(resource, classLoader), encoding)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
index a0b261649f..54604808e1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
@@ -271,7 +271,7 @@ object DataType {
protected lazy val structField: Parser[StructField] =
("StructField(" ~> "[a-zA-Z0-9_]*".r) ~ ("," ~> dataType) ~ ("," ~> boolVal <~ ")") ^^ {
- case name ~ tpe ~ nullable =>
+ case name ~ tpe ~ nullable =>
StructField(name, tpe, nullable = nullable)
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
index bbc0b661a0..7ff51db76b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
@@ -253,7 +253,7 @@ class ScalaReflectionSuite extends FunSuite {
}
assert(ArrayType(IntegerType) === typeOfObject3(Seq(1, 2, 3)))
- assert(ArrayType(ArrayType(IntegerType)) === typeOfObject3(Seq(Seq(1,2,3))))
+ assert(ArrayType(ArrayType(IntegerType)) === typeOfObject3(Seq(Seq(1, 2, 3))))
}
test("convert PrimitiveData to catalyst") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
index 890ea2a84b..9eed15952d 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
@@ -28,7 +28,7 @@ private[sql] case class TestCommand(cmd: String) extends LogicalPlan with Comman
}
private[sql] class SuperLongKeywordTestParser extends AbstractSparkSQLParser {
- protected val EXECUTE = Keyword("THISISASUPERLONGKEYWORDTEST")
+ protected val EXECUTE = Keyword("THISISASUPERLONGKEYWORDTEST")
override protected lazy val start: Parser[LogicalPlan] = set
@@ -39,7 +39,7 @@ private[sql] class SuperLongKeywordTestParser extends AbstractSparkSQLParser {
}
private[sql] class CaseInsensitiveTestParser extends AbstractSparkSQLParser {
- protected val EXECUTE = Keyword("EXECUTE")
+ protected val EXECUTE = Keyword("EXECUTE")
override protected lazy val start: Parser[LogicalPlan] = set
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index 939cefb71b..fcff24ca31 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -155,7 +155,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
caseSensitive: Boolean = true): Unit = {
test(name) {
val error = intercept[AnalysisException] {
- if(caseSensitive) {
+ if (caseSensitive) {
caseSensitiveAnalyze(plan)
} else {
caseInsensitiveAnalyze(plan)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
index 5c4a1527c2..a14f776b1e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -43,8 +43,8 @@ class ExpressionEvaluationBaseSuite extends FunSuite {
val actual = try evaluate(expression, inputRow) catch {
case e: Exception => fail(s"Exception evaluating $expression", e)
}
- if(actual != expected) {
- val input = if(inputRow == EmptyRow) "" else s", input: $inputRow"
+ if (actual != expected) {
+ val input = if (inputRow == EmptyRow) "" else s", input: $inputRow"
fail(s"Incorrect Evaluation: $expression, actual: $actual, expected: $expected$input")
}
}
@@ -126,37 +126,37 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
}
booleanLogicTest("AND", _ && _,
- (true,  true,  true) ::
- (true,  false, false) ::
- (true,  null,  null) ::
- (false, true,  false) ::
+ (true, true, true) ::
+ (true, false, false) ::
+ (true, null, null) ::
+ (false, true, false) ::
(false, false, false) ::
- (false, null,  false) ::
- (null,  true,  null) ::
- (null,  false, false) ::
- (null,  null,  null) :: Nil)
+ (false, null, false) ::
+ (null, true, null) ::
+ (null, false, false) ::
+ (null, null, null) :: Nil)
booleanLogicTest("OR", _ || _,
- (true,  true,  true) ::
- (true,  false, true) ::
- (true,  null,  true) ::
- (false, true,  true) ::
+ (true, true, true) ::
+ (true, false, true) ::
+ (true, null, true) ::
+ (false, true, true) ::
(false, false, false) ::
- (false, null,  null) ::
- (null,  true,  true) ::
- (null,  false, null) ::
- (null,  null,  null) :: Nil)
+ (false, null, null) ::
+ (null, true, true) ::
+ (null, false, null) ::
+ (null, null, null) :: Nil)
booleanLogicTest("=", _ === _,
- (true,  true,  true) ::
- (true,  false, false) ::
- (true,  null,  null) ::
- (false, true,  false) ::
+ (true, true, true) ::
+ (true, false, false) ::
+ (true, null, null) ::
+ (false, true, false) ::
(false, false, true) ::
- (false, null,  null) ::
- (null,  true,  null) ::
- (null,  false, null) ::
- (null,  null,  null) :: Nil)
+ (false, null, null) ::
+ (null, true, null) ::
+ (null, false, null) ::
+ (null, null, null) :: Nil)
def booleanLogicTest(
name: String,
@@ -164,7 +164,7 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
truthTable: Seq[(Any, Any, Any)]) {
test(s"3VL $name") {
truthTable.foreach {
- case (l,r,answer) =>
+ case (l, r, answer) =>
val expr = op(Literal.create(l, BooleanType), Literal.create(r, BooleanType))
checkEvaluation(expr, answer)
}
@@ -928,7 +928,7 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
:: StructField("b", StringType, nullable = false) :: Nil
)
- assert(getStructField(BoundReference(2,typeS, nullable = true), "a").nullable === true)
+ assert(getStructField(BoundReference(2, typeS, nullable = true), "a").nullable === true)
assert(getStructField(BoundReference(2, typeS_notNullable, nullable = false), "a").nullable
=== false)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala
index b5ebe4b38e..d7c437095e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala
@@ -41,9 +41,9 @@ class GeneratedEvaluationSuite extends ExpressionEvaluationSuite {
""".stripMargin)
}
- val actual = plan(inputRow).apply(0)
- if(actual != expected) {
- val input = if(inputRow == EmptyRow) "" else s", input: $inputRow"
+ val actual = plan(inputRow).apply(0)
+ if (actual != expected) {
+ val input = if (inputRow == EmptyRow) "" else s", input: $inputRow"
fail(s"Incorrect Evaluation: $expression, actual: $actual, expected: $expected$input")
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala
index 97af2e0fd0..a40324b008 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala
@@ -53,7 +53,7 @@ class GeneratedMutableEvaluationSuite extends ExpressionEvaluationSuite {
""".stripMargin)
}
if (actual != expectedRow) {
- val input = if(inputRow == EmptyRow) "" else s", input: $inputRow"
+ val input = if (inputRow == EmptyRow) "" else s", input: $inputRow"
fail(s"Incorrect Evaluation: $expression, actual: $actual, expected: $expected$input")
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
index 6255578d7f..465a5e6914 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
@@ -78,9 +78,9 @@ class BooleanSimplificationSuite extends PlanTest with PredicateHelper {
test("(a && b && c && ...) || (a && b && d && ...) || (a && b && e && ...) ...") {
checkCondition('b > 3 || 'c > 5, 'b > 3 || 'c > 5)
- checkCondition(('a < 2 && 'a > 3 && 'b > 5) || 'a < 2, 'a < 2)
+ checkCondition(('a < 2 && 'a > 3 && 'b > 5) || 'a < 2, 'a < 2)
- checkCondition('a < 2 || ('a < 2 && 'a > 3 && 'b > 5), 'a < 2)
+ checkCondition('a < 2 || ('a < 2 && 'a > 3 && 'b > 5), 'a < 2)
val input = ('a === 'b && 'b > 3 && 'c > 2) ||
('a === 'b && 'c < 1 && 'a === 5) ||
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
index be33cb9bb8..ff25470bf0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
@@ -97,7 +97,7 @@ class FilterPushdownSuite extends PlanTest {
test("column pruning for Project(ne, Limit)") {
val originalQuery =
testRelation
- .select('a,'b)
+ .select('a, 'b)
.limit(2)
.select('a)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
index 3eb399e68e..11b0859d3f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
@@ -46,7 +46,7 @@ class OptimizeInSuite extends PlanTest {
test("OptimizedIn test: In clause optimized to InSet") {
val originalQuery =
testRelation
- .where(In(UnresolvedAttribute("a"), Seq(Literal(1),Literal(2))))
+ .where(In(UnresolvedAttribute("a"), Seq(Literal(1), Literal(2))))
.analyze
val optimized = Optimize.execute(originalQuery.analyze)
@@ -61,13 +61,13 @@ class OptimizeInSuite extends PlanTest {
test("OptimizedIn test: In clause not optimized in case filter has attributes") {
val originalQuery =
testRelation
- .where(In(UnresolvedAttribute("a"), Seq(Literal(1),Literal(2), UnresolvedAttribute("b"))))
+ .where(In(UnresolvedAttribute("a"), Seq(Literal(1), Literal(2), UnresolvedAttribute("b"))))
.analyze
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
- .where(In(UnresolvedAttribute("a"), Seq(Literal(1),Literal(2), UnresolvedAttribute("b"))))
+ .where(In(UnresolvedAttribute("a"), Seq(Literal(1), Literal(2), UnresolvedAttribute("b"))))
.analyze
comparePlans(optimized, correctAnswer)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala
index a3ad200800..35f50be46b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala
@@ -33,8 +33,8 @@ class UnionPushdownSuite extends PlanTest {
UnionPushdown) :: Nil
}
- val testRelation = LocalRelation('a.int, 'b.int, 'c.int)
- val testRelation2 = LocalRelation('d.int, 'e.int, 'f.int)
+ val testRelation = LocalRelation('a.int, 'b.int, 'c.int)
+ val testRelation2 = LocalRelation('d.int, 'e.int, 'f.int)
val testUnion = Union(testRelation, testRelation2)
test("union: filter to each side") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
index e5f77dcd96..9fcfc51c96 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
@@ -91,7 +91,7 @@ class TreeNodeSuite extends FunSuite {
test("transform works on nodes with Option children") {
val dummy1 = Dummy(Some(Literal.create("1", StringType)))
val dummy2 = Dummy(None)
- val toZero: PartialFunction[Expression, Expression] = { case Literal(_, _) => Literal(0) }
+ val toZero: PartialFunction[Expression, Expression] = { case Literal(_, _) => Literal(0) }
var actual = dummy1 transformDown toZero
assert(actual === Dummy(Some(Literal(0))))
@@ -104,7 +104,7 @@ class TreeNodeSuite extends FunSuite {
}
test("preserves origin") {
- CurrentOrigin.setPosition(1,1)
+ CurrentOrigin.setPosition(1, 1)
val add = Add(Literal(1), Literal(1))
CurrentOrigin.reset()
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
index a73317c869..df11982781 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
@@ -250,11 +250,11 @@ class DataTypeSuite extends FunSuite {
expected = false)
checkEqualsIgnoreCompatibleNullability(
from = MapType(StringType, ArrayType(IntegerType, true), valueContainsNull = true),
- to = MapType(StringType, ArrayType(IntegerType, false), valueContainsNull = true),
+ to = MapType(StringType, ArrayType(IntegerType, false), valueContainsNull = true),
expected = false)
checkEqualsIgnoreCompatibleNullability(
from = MapType(StringType, ArrayType(IntegerType, false), valueContainsNull = true),
- to = MapType(StringType, ArrayType(IntegerType, true), valueContainsNull = true),
+ to = MapType(StringType, ArrayType(IntegerType, true), valueContainsNull = true),
expected = true)