aboutsummaryrefslogtreecommitdiff
path: root/sql/catalyst
diff options
context:
space:
mode:
authorSandeep <sandeep@techaddict.me>2014-04-10 15:04:13 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-04-10 15:04:13 -0700
commit930b70f0523e96fe01c1317ef7fad1b76b36d4d9 (patch)
treefba70b8897f6c5ae1123e4717d8efdb4d4b0acc4 /sql/catalyst
parentf0466625200842f3cc486e9aa1caa417586be533 (diff)
downloadspark-930b70f0523e96fe01c1317ef7fad1b76b36d4d9.tar.gz
spark-930b70f0523e96fe01c1317ef7fad1b76b36d4d9.tar.bz2
spark-930b70f0523e96fe01c1317ef7fad1b76b36d4d9.zip
Remove unnecessary whitespace
Stack these together in a single commit; otherwise they show up chunk by chunk in different commits. Author: Sandeep <sandeep@techaddict.me> Closes #380 from techaddict/white_space and squashes the following commits: b58f294 [Sandeep] Remove unnecessary whitespace
Diffstat (limited to 'sql/catalyst')
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala6
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala12
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala28
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala4
-rw-r--r--sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala10
5 files changed, 30 insertions, 30 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 17118499d0..1f3fab09e9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -28,7 +28,7 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression {
override def toString = s"CAST($child, $dataType)"
type EvaluatedType = Any
-
+
def nullOrCast[T](a: Any, func: T => Any): Any = if(a == null) {
null
} else {
@@ -40,7 +40,7 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression {
case BinaryType => nullOrCast[Array[Byte]](_, new String(_, "UTF-8"))
case _ => nullOrCast[Any](_, _.toString)
}
-
+
// BinaryConverter
def castToBinary: Any => Any = child.dataType match {
case StringType => nullOrCast[String](_, _.getBytes("UTF-8"))
@@ -58,7 +58,7 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression {
case DoubleType => nullOrCast[Double](_, _ != 0)
case FloatType => nullOrCast[Float](_, _ != 0)
}
-
+
// TimestampConverter
def castToTimestamp: Any => Any = child.dataType match {
case StringType => nullOrCast[String](_, s => {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index 8a1db8e796..dd9332ada8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -86,7 +86,7 @@ abstract class Expression extends TreeNode[Expression] {
}
/**
- * Evaluation helper function for 2 Numeric children expressions. Those expressions are supposed
+ * Evaluation helper function for 2 Numeric children expressions. Those expressions are supposed
* to be in the same data type, and also the return type.
* Either one of the expressions result is null, the evaluation result should be null.
*/
@@ -120,7 +120,7 @@ abstract class Expression extends TreeNode[Expression] {
}
/**
- * Evaluation helper function for 2 Fractional children expressions. Those expressions are
+ * Evaluation helper function for 2 Fractional children expressions. Those expressions are
* supposed to be in the same data type, and also the return type.
* Either one of the expressions result is null, the evaluation result should be null.
*/
@@ -153,7 +153,7 @@ abstract class Expression extends TreeNode[Expression] {
}
/**
- * Evaluation helper function for 2 Integral children expressions. Those expressions are
+ * Evaluation helper function for 2 Integral children expressions. Those expressions are
* supposed to be in the same data type, and also the return type.
* Either one of the expressions result is null, the evaluation result should be null.
*/
@@ -186,12 +186,12 @@ abstract class Expression extends TreeNode[Expression] {
}
/**
- * Evaluation helper function for 2 Comparable children expressions. Those expressions are
+ * Evaluation helper function for 2 Comparable children expressions. Those expressions are
* supposed to be in the same data type, and the return type should be Integer:
* Negative value: 1st argument less than 2nd argument
* Zero: 1st argument equals 2nd argument
* Positive value: 1st argument greater than 2nd argument
- *
+ *
* Either one of the expressions result is null, the evaluation result should be null.
*/
@inline
@@ -213,7 +213,7 @@ abstract class Expression extends TreeNode[Expression] {
null
} else {
e1.dataType match {
- case i: NativeType =>
+ case i: NativeType =>
f.asInstanceOf[(Ordering[i.JvmType], i.JvmType, i.JvmType) => Boolean](
i.ordering, evalE1.asInstanceOf[i.JvmType], evalE2.asInstanceOf[i.JvmType])
case other => sys.error(s"Type $other does not support ordered operations")
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
index a27c71db1b..ddc16ce87b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
@@ -28,19 +28,19 @@ trait StringRegexExpression {
self: BinaryExpression =>
type EvaluatedType = Any
-
+
def escape(v: String): String
def matches(regex: Pattern, str: String): Boolean
-
+
def nullable: Boolean = true
def dataType: DataType = BooleanType
-
- // try cache the pattern for Literal
+
+ // try cache the pattern for Literal
private lazy val cache: Pattern = right match {
case x @ Literal(value: String, StringType) => compile(value)
case _ => null
}
-
+
protected def compile(str: String): Pattern = if(str == null) {
null
} else {
@@ -49,7 +49,7 @@ trait StringRegexExpression {
}
protected def pattern(str: String) = if(cache == null) compile(str) else cache
-
+
override def eval(input: Row): Any = {
val l = left.eval(input)
if (l == null) {
@@ -73,11 +73,11 @@ trait StringRegexExpression {
/**
* Simple RegEx pattern matching function
*/
-case class Like(left: Expression, right: Expression)
+case class Like(left: Expression, right: Expression)
extends BinaryExpression with StringRegexExpression {
-
+
def symbol = "LIKE"
-
+
// replace the _ with .{1} exactly match 1 time of any character
// replace the % with .*, match 0 or more times with any character
override def escape(v: String) = {
@@ -98,19 +98,19 @@ case class Like(left: Expression, right: Expression)
sb.append(Pattern.quote(Character.toString(n)));
}
}
-
+
i += 1
}
-
+
sb.toString()
}
-
+
override def matches(regex: Pattern, str: String): Boolean = regex.matcher(str).matches()
}
-case class RLike(left: Expression, right: Expression)
+case class RLike(left: Expression, right: Expression)
extends BinaryExpression with StringRegexExpression {
-
+
def symbol = "RLIKE"
override def escape(v: String): String = v
override def matches(regex: Pattern, str: String): Boolean = regex.matcher(str).find(0)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
index cdeb01a965..da34bd3a21 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
@@ -55,9 +55,9 @@ case object BooleanType extends NativeType {
case object TimestampType extends NativeType {
type JvmType = Timestamp
-
+
@transient lazy val tag = typeTag[JvmType]
-
+
val ordering = new Ordering[JvmType] {
def compare(x: Timestamp, y: Timestamp) = x.compareTo(y)
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
index 888a19d79f..2cd0d2b0e1 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -144,7 +144,7 @@ class ExpressionEvaluationSuite extends FunSuite {
checkEvaluation("abc" like "b%", false)
checkEvaluation("abc" like "bc%", false)
}
-
+
test("LIKE Non-literal Regular Expression") {
val regEx = 'a.string.at(0)
checkEvaluation("abcd" like regEx, null, new GenericRow(Array[Any](null)))
@@ -164,7 +164,7 @@ class ExpressionEvaluationSuite extends FunSuite {
test("RLIKE literal Regular Expression") {
checkEvaluation("abdef" rlike "abdef", true)
checkEvaluation("abbbbc" rlike "a.*c", true)
-
+
checkEvaluation("fofo" rlike "^fo", true)
checkEvaluation("fo\no" rlike "^fo\no$", true)
checkEvaluation("Bn" rlike "^Ba*n", true)
@@ -196,9 +196,9 @@ class ExpressionEvaluationSuite extends FunSuite {
evaluate("abbbbc" rlike regEx, new GenericRow(Array[Any]("**")))
}
}
-
+
test("data type casting") {
-
+
val sts = "1970-01-01 00:00:01.0"
val ts = Timestamp.valueOf(sts)
@@ -236,7 +236,7 @@ class ExpressionEvaluationSuite extends FunSuite {
checkEvaluation("23" cast ShortType, 23)
checkEvaluation("2012-12-11" cast DoubleType, null)
checkEvaluation(Literal(123) cast IntegerType, 123)
-
+
intercept[Exception] {evaluate(Literal(1) cast BinaryType, null)}
}