aboutsummaryrefslogtreecommitdiff
path: root/sql/hive
diff options
context:
space:
mode:
authorReynold Xin <rxin@databricks.com>2015-04-22 11:18:01 -0700
committerReynold Xin <rxin@databricks.com>2015-04-22 11:18:01 -0700
commitcdf0328684f70ddcd49b23c23c1532aeb9caa44e (patch)
tree7829448993c87df6aed3ab97ea88d6dc9cc814c2 /sql/hive
parent33b85620f910c404873d362d27cca1223084913a (diff)
downloadspark-cdf0328684f70ddcd49b23c23c1532aeb9caa44e.tar.gz
spark-cdf0328684f70ddcd49b23c23c1532aeb9caa44e.tar.bz2
spark-cdf0328684f70ddcd49b23c23c1532aeb9caa44e.zip
[SQL] Rename some apply functions.
I was looking at the code gen code and got confused by a few of the use cases of apply, in particular apply on objects. So I went ahead and changed a few of them. Hopefully this is slightly clearer with a proper verb. Author: Reynold Xin <rxin@databricks.com> Closes #5624 from rxin/apply-rename and squashes the following commits: ee45034 [Reynold Xin] [SQL] Rename some apply functions.
Diffstat (limited to 'sql/hive')
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala2
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala4
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala4
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala2
4 files changed, 6 insertions, 6 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index c4a73b3004..dd06b2620c 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -93,7 +93,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
if (conf.dialect == "sql") {
super.sql(substituted)
} else if (conf.dialect == "hiveql") {
- val ddlPlan = ddlParserWithHiveQL(sqlText, exceptionOnError = false)
+ val ddlPlan = ddlParserWithHiveQL.parse(sqlText, exceptionOnError = false)
DataFrame(this, ddlPlan.getOrElse(HiveQl.parseSql(substituted)))
} else {
sys.error(s"Unsupported SQL dialect: ${conf.dialect}. Try 'sql' or 'hiveql'")
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index 85061f2277..0ea6d57b81 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -144,7 +144,7 @@ private[hive] object HiveQl {
protected val hqlParser = {
val fallback = new ExtendedHiveQlParser
- new SparkSQLParser(fallback(_))
+ new SparkSQLParser(fallback.parse(_))
}
/**
@@ -240,7 +240,7 @@ private[hive] object HiveQl {
/** Returns a LogicalPlan for a given HiveQL string. */
- def parseSql(sql: String): LogicalPlan = hqlParser(sql)
+ def parseSql(sql: String): LogicalPlan = hqlParser.parse(sql)
val errorRegEx = "line (\\d+):(\\d+) (.*)".r
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
index a6f4fbe8ab..be9249a8b1 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
@@ -119,9 +119,9 @@ private[hive] trait HiveStrategies {
val inputData = new GenericMutableRow(relation.partitionKeys.size)
val pruningCondition =
if (codegenEnabled) {
- GeneratePredicate(castedPredicate)
+ GeneratePredicate.generate(castedPredicate)
} else {
- InterpretedPredicate(castedPredicate)
+ InterpretedPredicate.create(castedPredicate)
}
val partitions = relation.hiveQlPartitions.filter { part =>
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
index 6570fa1043..9f17bca083 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -185,7 +185,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
logDebug(s"Query references test tables: ${referencedTestTables.mkString(", ")}")
referencedTestTables.foreach(loadTestTable)
// Proceed with analysis.
- analyzer(logical)
+ analyzer.execute(logical)
}
}