author    wangfei <wangfei1@huawei.com>  2015-04-26 21:08:47 -0700
committer Reynold Xin <rxin@databricks.com>  2015-04-26 21:08:47 -0700
commit    d188b8bad82836bf654e57f9dd4e1ddde1d530f4 (patch)
tree      2fc733e81ebcd3e6e9780239317fe9f60d36d885 /sql
parent    ca55dc95b777d96b27d4e4c0457dd25145dcd6e9 (diff)
[SQL][Minor] rename DataTypeParser.apply to DataTypeParser.parse
rename DataTypeParser.apply to DataTypeParser.parse to make it clearer and more readable. /cc rxin

Author: wangfei <wangfei1@huawei.com>

Closes #5710 from scwf/apply and squashes the following commits:

c319977 [wangfei] rename apply to parse
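For context, a minimal sketch of the call-site change (illustrative only: DataTypeParser is private[sql], so this assumes code living inside the org.apache.spark.sql package, and the input string "array<int>" is not from the patch):

    import org.apache.spark.sql.types._

    // Before the patch the companion object's apply was invoked implicitly,
    // hiding the method call at the call site:
    //   val dt: DataType = DataTypeParser("array<int>")
    // After the patch the call names its intent explicitly:
    val dt: DataType = DataTypeParser.parse("array<int>")
    assert(dt == ArrayType(IntegerType))  // array<int> parses to ArrayType(IntegerType, containsNull = true)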
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala       | 2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeParserSuite.scala  | 4
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/Column.scala                         | 2
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala      | 2
5 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
index 9c8c643f7d..4574934d91 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
@@ -92,7 +92,7 @@ object PhysicalOperation extends PredicateHelper {
   }
 
   def collectAliases(fields: Seq[Expression]): Map[Attribute, Expression] = fields.collect {
-    case a @ Alias(child, _) => a.toAttribute.asInstanceOf[Attribute] -> child
+    case a @ Alias(child, _) => a.toAttribute -> child
   }.toMap
 
   def substitute(aliases: Map[Attribute, Expression])(expr: Expression): Expression = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
index 5163f05879..04f3379afb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
@@ -108,7 +108,7 @@ private[sql] object DataTypeParser {
     override val lexical = new SqlLexical
   }
 
-  def apply(dataTypeString: String): DataType = dataTypeParser.toDataType(dataTypeString)
+  def parse(dataTypeString: String): DataType = dataTypeParser.toDataType(dataTypeString)
 }
 
 /** The exception thrown from the [[DataTypeParser]]. */
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeParserSuite.scala
index 169125264a..3e7cf7cbb5 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeParserSuite.scala
@@ -23,13 +23,13 @@ class DataTypeParserSuite extends FunSuite {
   def checkDataType(dataTypeString: String, expectedDataType: DataType): Unit = {
     test(s"parse ${dataTypeString.replace("\n", "")}") {
-      assert(DataTypeParser(dataTypeString) === expectedDataType)
+      assert(DataTypeParser.parse(dataTypeString) === expectedDataType)
     }
   }
 
   def unsupported(dataTypeString: String): Unit = {
     test(s"$dataTypeString is not supported") {
-      intercept[DataTypeException](DataTypeParser(dataTypeString))
+      intercept[DataTypeException](DataTypeParser.parse(dataTypeString))
     }
   }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
index edb229c059..33f9d0b37d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
@@ -647,7 +647,7 @@ class Column(protected[sql] val expr: Expression) extends Logging {
    *
    * @group expr_ops
    */
-  def cast(to: String): Column = cast(DataTypeParser(to))
+  def cast(to: String): Column = cast(DataTypeParser.parse(to))
 
   /**
    * Returns an ordering used in sorting.
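Note on the sql/core hunk above: the string overload of Column.cast is public API, and the rename is purely internal, so existing call sites are unaffected. A minimal usage sketch (the DataFrame df and its columns are hypothetical, not from the patch):

    // The type string is handed to DataTypeParser.parse under the hood.
    val ageAsString: Column = df("age").cast("string")
    val tagsAsArray: Column = df("tags").cast("array<string>")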
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index f1c0bd92aa..4d222cf88e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -871,7 +871,7 @@ private[hive] case class MetastoreRelation
 
 private[hive] object HiveMetastoreTypes {
-  def toDataType(metastoreType: String): DataType = DataTypeParser(metastoreType)
+  def toDataType(metastoreType: String): DataType = DataTypeParser.parse(metastoreType)
 
   def toMetastoreType(dt: DataType): String = dt match {
     case ArrayType(elementType, _) => s"array<${toMetastoreType(elementType)}>"