aboutsummaryrefslogtreecommitdiff
path: root/sql/hive
diff options
context:
space:
mode:
authorYin Huai <huai@cse.ohio-state.edu>2014-07-07 17:01:44 -0700
committerMichael Armbrust <michael@databricks.com>2014-07-07 17:01:59 -0700
commite522971e81efd3a7ec4a39b20082b890d11caa42 (patch)
tree9935dc2b381eeb5787bd3e8c2044811d6cfeaed3 /sql/hive
parent5044ba60a92495124b97ee97b87a45ce46d6073e (diff)
downloadspark-e522971e81efd3a7ec4a39b20082b890d11caa42.tar.gz
spark-e522971e81efd3a7ec4a39b20082b890d11caa42.tar.bz2
spark-e522971e81efd3a7ec4a39b20082b890d11caa42.zip
[SPARK-2339][SQL] SQL parser in sql-core is case sensitive, but a table alias is converted to lower case when we create Subquery
Reported by http://apache-spark-user-list.1001560.n3.nabble.com/Spark-SQL-Join-throws-exception-td8599.html After we get the table from the catalog, because the table has an alias, we will temporarily insert a Subquery. Then, we convert the table alias to lower case no matter if the parser is case sensitive or not. To see the issue ... ``` val sqlContext = new org.apache.spark.sql.SQLContext(sc) import sqlContext.createSchemaRDD case class Person(name: String, age: Int) val people = sc.textFile("examples/src/main/resources/people.txt").map(_.split(",")).map(p => Person(p(0), p(1).trim.toInt)) people.registerAsTable("people") sqlContext.sql("select PEOPLE.name from people PEOPLE") ``` The plan is ... ``` == Query Plan == Project ['PEOPLE.name] ExistingRdd [name#0,age#1], MapPartitionsRDD[4] at mapPartitions at basicOperators.scala:176 ``` You can find that `PEOPLE.name` is not resolved. This PR introduces three changes. 1. If a table has an alias, the catalog will not lowercase the alias. If a lowercase alias is needed, the analyzer will do the work. 2. A catalog has a new val caseSensitive that indicates if this catalog is case sensitive or not. For example, a SimpleCatalog is case sensitive, but a HiveMetastoreCatalog is not. 3. Corresponding unit tests. With this PR, case sensitivity of database names and table names is handled by the catalog. Case sensitivity of other identifiers is handled by the analyzer. JIRA: https://issues.apache.org/jira/browse/SPARK-2339 Author: Yin Huai <huai@cse.ohio-state.edu> Closes #1317 from yhuai/SPARK-2339 and squashes the following commits: 12d8006 [Yin Huai] Handling case sensitivity correctly. This patch introduces three changes. 1. If a table has an alias, the catalog will not lowercase the alias. If a lowercase alias is needed, the analyzer will do the work. 2. A catalog has a new val caseSensitive that indicates if this catalog is case sensitive or not. For example, a SimpleCatalog is case sensitive, but a HiveMetastoreCatalog is not. 3. Corresponding unit tests. 
With this patch, case sensitivity of database names and table names is handled by the catalog. Case sensitivity of other identifiers is handled by the analyzer. (cherry picked from commit c0b4cf097de50eb2c4b0f0e67da53ee92efc1f77) Signed-off-by: Michael Armbrust <michael@databricks.com>
Diffstat (limited to 'sql/hive')
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala23
-rw-r--r--sql/hive/src/test/resources/golden/case sensitivity: Hive table-0-5d14d21a239daa42b086cc895215009a14
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala16
3 files changed, 44 insertions, 9 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index b3dba14519..28ccd6db89 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -46,12 +46,15 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
val client = Hive.get(hive.hiveconf)
+ val caseSensitive: Boolean = false
+
def lookupRelation(
db: Option[String],
tableName: String,
alias: Option[String]): LogicalPlan = {
- val databaseName = db.getOrElse(hive.sessionState.getCurrentDatabase)
- val table = client.getTable(databaseName, tableName)
+ val (dbName, tblName) = processDatabaseAndTableName(db, tableName)
+ val databaseName = dbName.getOrElse(hive.sessionState.getCurrentDatabase)
+ val table = client.getTable(databaseName, tblName)
val partitions: Seq[Partition] =
if (table.isPartitioned) {
client.getAllPartitionsForPruner(table).toSeq
@@ -61,8 +64,8 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
// Since HiveQL is case insensitive for table names we make them all lowercase.
MetastoreRelation(
- databaseName.toLowerCase,
- tableName.toLowerCase,
+ databaseName,
+ tblName,
alias)(table.getTTable, partitions.map(part => part.getTPartition))
}
@@ -71,7 +74,8 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
tableName: String,
schema: Seq[Attribute],
allowExisting: Boolean = false): Unit = {
- val table = new Table(databaseName, tableName)
+ val (dbName, tblName) = processDatabaseAndTableName(databaseName, tableName)
+ val table = new Table(dbName, tblName)
val hiveSchema =
schema.map(attr => new FieldSchema(attr.name, toMetastoreType(attr.dataType), ""))
table.setFields(hiveSchema)
@@ -86,7 +90,7 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
sd.setInputFormat("org.apache.hadoop.mapred.TextInputFormat")
sd.setOutputFormat("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat")
val serDeInfo = new SerDeInfo()
- serDeInfo.setName(tableName)
+ serDeInfo.setName(tblName)
serDeInfo.setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe")
serDeInfo.setParameters(Map[String, String]())
sd.setSerdeInfo(serDeInfo)
@@ -105,13 +109,14 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
object CreateTables extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
case InsertIntoCreatedTable(db, tableName, child) =>
- val databaseName = db.getOrElse(hive.sessionState.getCurrentDatabase)
+ val (dbName, tblName) = processDatabaseAndTableName(db, tableName)
+ val databaseName = dbName.getOrElse(hive.sessionState.getCurrentDatabase)
- createTable(databaseName, tableName, child.output)
+ createTable(databaseName, tblName, child.output)
InsertIntoTable(
EliminateAnalysisOperators(
- lookupRelation(Some(databaseName), tableName, None)),
+ lookupRelation(Some(databaseName), tblName, None)),
Map.empty,
child,
overwrite = false)
diff --git a/sql/hive/src/test/resources/golden/case sensitivity: Hive table-0-5d14d21a239daa42b086cc895215009a b/sql/hive/src/test/resources/golden/case sensitivity: Hive table-0-5d14d21a239daa42b086cc895215009a
new file mode 100644
index 0000000000..4d7127c0fa
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/case sensitivity: Hive table-0-5d14d21a239daa42b086cc895215009a
@@ -0,0 +1,14 @@
+0 val_0
+4 val_4
+12 val_12
+8 val_8
+0 val_0
+0 val_0
+10 val_10
+5 val_5
+11 val_11
+5 val_5
+2 val_2
+12 val_12
+5 val_5
+9 val_9
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 9f1cd70310..a623d29b53 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -210,6 +210,22 @@ class HiveQuerySuite extends HiveComparisonTest {
}
}
+ createQueryTest("case sensitivity: Hive table",
+ "SELECT srcalias.KEY, SRCALIAS.value FROM sRc SrCAlias WHERE SrCAlias.kEy < 15")
+
+ test("case sensitivity: registered table") {
+ val testData: SchemaRDD =
+ TestHive.sparkContext.parallelize(
+ TestData(1, "str1") ::
+ TestData(2, "str2") :: Nil)
+ testData.registerAsTable("REGisteredTABle")
+
+ assertResult(Array(Array(2, "str2"))) {
+ hql("SELECT tablealias.A, TABLEALIAS.b FROM reGisteredTABle TableAlias " +
+ "WHERE TableAliaS.a > 1").collect()
+ }
+ }
+
def isExplanation(result: SchemaRDD) = {
val explanation = result.select('plan).collect().map { case Row(plan: String) => plan }
explanation.size > 1 && explanation.head.startsWith("Physical execution plan")