author    Alex Liu <alex_liu68@yahoo.com>             2015-01-10 13:23:09 -0800
committer Michael Armbrust <michael@databricks.com>   2015-01-10 13:23:09 -0800
commit    4b39fd1e63188821fc84a13f7ccb6e94277f4be7 (patch)
tree      3c98f9ff0d0562c6096ff0e82189bf543ab003bd /sql/core
parent    1e56eba5d906bef793dfd6f199db735a6116a764 (diff)
download  spark-4b39fd1e63188821fc84a13f7ccb6e94277f4be7.tar.gz
          spark-4b39fd1e63188821fc84a13f7ccb6e94277f4be7.tar.bz2
          spark-4b39fd1e63188821fc84a13f7ccb6e94277f4be7.zip
[SPARK-4943][SQL] Allow table name having dot for db/catalog
This pull request only fixes the parsing error and changes the API to use a tableIdentifier sequence; changes related to joining data sources across different catalogs are not part of this pull request. A brief sketch of the identifier change follows the commit log below.
Author: Alex Liu <alex_liu68@yahoo.com>
Closes #3941 from alexliu68/SPARK-SQL-4943-3 and squashes the following commits:
343ae27 [Alex Liu] [SPARK-4943][SQL] refactoring according to review
29e5e55 [Alex Liu] [SPARK-4943][SQL] fix failed Hive CTAS tests
6ae77ce [Alex Liu] [SPARK-4943][SQL] fix TestHive matching error
3652997 [Alex Liu] [SPARK-4943][SQL] Allow table name having dot to support db/catalog ...
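To make the API shift concrete, here is a minimal, hypothetical sketch (the object and method names are invented, not from the patch) contrasting the old (Option[String] db, String table) identifier shape with the Seq[String] tableIdentifier this change adopts:

// Hypothetical sketch, not code from this patch: contrasts the two
// table-identifier shapes.
object TableIdentifierSketch {
  // Old shape: one optional database plus a bare table name, so a dotted
  // name like "catalog.db.table" has nowhere to put its extra parts.
  def oldStyle(db: Option[String], table: String): String =
    (db.toList :+ table).mkString(".")

  // New shape: every dot-separated part lives in one Seq[String], so
  // "db.table" (or deeper catalog paths) round-trips cleanly.
  def newStyle(tableIdentifier: Seq[String]): String =
    tableIdentifier.mkString(".")

  def main(args: Array[String]): Unit = {
    val parsed: Seq[String] = "mydb.mytable".split('.').toSeq
    println(oldStyle(Some("mydb"), "mytable")) // mydb.mytable
    println(newStyle(parsed))                  // mydb.mytable
  }
}

With a Seq, the parser no longer has to decide up front which dot separates a database from a table, which is what made dotted names fail before this change.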
Diffstat (limited to 'sql/core')
 sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala    | 6 +++---
 sql/core/src/main/scala/org/apache/spark/sql/SchemaRDDLike.scala | 4 ++--
 sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala     | 4 ++--
3 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index 6a1a4d995b..9962937277 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -276,7 +276,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
    * @group userf
    */
   def registerRDDAsTable(rdd: SchemaRDD, tableName: String): Unit = {
-    catalog.registerTable(None, tableName, rdd.queryExecution.logical)
+    catalog.registerTable(Seq(tableName), rdd.queryExecution.logical)
   }
 
   /**
@@ -289,7 +289,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
    */
   def dropTempTable(tableName: String): Unit = {
     tryUncacheQuery(table(tableName))
-    catalog.unregisterTable(None, tableName)
+    catalog.unregisterTable(Seq(tableName))
   }
 
   /**
@@ -308,7 +308,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
 
   /** Returns the specified table as a SchemaRDD */
   def table(tableName: String): SchemaRDD =
-    new SchemaRDD(this, catalog.lookupRelation(None, tableName))
+    new SchemaRDD(this, catalog.lookupRelation(Seq(tableName)))
 
   /**
    * :: DeveloperApi ::
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDDLike.scala b/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDDLike.scala
index fd5f4abcbc..3cf9209465 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDDLike.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDDLike.scala
@@ -97,8 +97,8 @@ private[sql] trait SchemaRDDLike {
    */
   @Experimental
   def insertInto(tableName: String, overwrite: Boolean): Unit =
-    sqlContext.executePlan(
-      InsertIntoTable(UnresolvedRelation(None, tableName), Map.empty, logicalPlan, overwrite)).toRdd
+    sqlContext.executePlan(InsertIntoTable(UnresolvedRelation(Seq(tableName)),
+      Map.empty, logicalPlan, overwrite)).toRdd
 
   /**
    * :: Experimental ::
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
index 1a4232dab8..c7e136388f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
@@ -302,8 +302,8 @@ class JoinSuite extends QueryTest with BeforeAndAfterEach {
     upperCaseData.where('N <= 4).registerTempTable("left")
     upperCaseData.where('N >= 3).registerTempTable("right")
 
-    val left = UnresolvedRelation(None, "left", None)
-    val right = UnresolvedRelation(None, "right", None)
+    val left = UnresolvedRelation(Seq("left"), None)
+    val right = UnresolvedRelation(Seq("right"), None)
 
     checkAnswer(
       left.join(right, FullOuter, Some("left.N".attr === "right.N".attr)),
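For orientation, here is a hedged call-site sketch of the SQLContext entry points touched above. It is not part of the patch: the app setup, the Person case class, and the data are invented for illustration, and the comments restate the catalog call each method makes after this change.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Invented for this sketch; not part of the patch.
case class Person(name: String, age: Int)

// Hypothetical call-site sketch on Spark 1.2-era API: exercises the
// SQLContext methods whose internals changed in the hunks above.
object DottedNameSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("sketch").setMaster("local"))
    val sqlContext = new SQLContext(sc)
    import sqlContext.createSchemaRDD // implicit RDD[Product] => SchemaRDD

    val rdd = sc.parallelize(Seq(Person("alice", 30)))
    sqlContext.registerRDDAsTable(rdd, "people") // now catalog.registerTable(Seq("people"), ...)
    val people = sqlContext.table("people")      // now catalog.lookupRelation(Seq("people"))
    println(people.count())                      // 1
    sqlContext.dropTempTable("people")           // now catalog.unregisterTable(Seq("people"))
    sc.stop()
  }
}

The external signatures are unchanged; only the internal catalog calls move from (Option[String], String) pairs to Seq(tableName), which is what lets a dotted name flow through unbroken.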