aboutsummaryrefslogtreecommitdiff
path: root/sql
diff options
context:
space:
mode:
authorgatorsmile <gatorsmile@gmail.com>2016-03-02 23:07:48 +0100
committerHerman van Hovell <hvanhovell@questtec.nl>2016-03-02 23:07:48 +0100
commit9e01fe2ed1e834710f4ee6a02864ab0fcc528fef (patch)
tree3ce2c80db1220f785c03105cbda3fbaa78eb2b92 /sql
parentd6969ffc0f86c8a4ea0e94d06cb227178b000962 (diff)
downloadspark-9e01fe2ed1e834710f4ee6a02864ab0fcc528fef.tar.gz
spark-9e01fe2ed1e834710f4ee6a02864ab0fcc528fef.tar.bz2
spark-9e01fe2ed1e834710f4ee6a02864ab0fcc528fef.zip
[SPARK-13535][SQL] Fix Analysis Exceptions when Using Backticks in Transform Clause
#### What changes were proposed in this pull request? ```SQL FROM (FROM test SELECT TRANSFORM(key, value) USING 'cat' AS (`thing1` int, thing2 string)) t SELECT thing1 + 1 ``` This query returns an analysis error, like: ``` Failed to analyze query: org.apache.spark.sql.AnalysisException: cannot resolve '`thing1`' given input columns: [`thing1`, thing2]; line 3 pos 7 'Project [unresolvedalias(('thing1 + 1), None)] +- SubqueryAlias t +- ScriptTransformation [key#2,value#3], cat, [`thing1`#6,thing2#7], HiveScriptIOSchema(List(),List(),Some(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe),Some(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe),List((field.delim, )),List((field.delim, )),Some(org.apache.hadoop.hive.ql.exec.TextRecordReader),Some(org.apache.hadoop.hive.ql.exec.TextRecordWriter),false) +- SubqueryAlias test +- Project [_1#0 AS key#2,_2#1 AS value#3] +- LocalRelation [_1#0,_2#1], [[1,1],[2,2],[3,3],[4,4],[5,5]] ``` The backticks of \`thing1\` should be cleaned before entering Parser/Analyzer. This PR fixes this issue. #### How was this patch tested? Added a test case and modified an existing test case Author: gatorsmile <gatorsmile@gmail.com> Closes #11415 from gatorsmile/scriptTransform.
Diffstat (limited to 'sql')
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala6
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala8
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala2
3 files changed, 12 insertions, 4 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index 5801051353..812e1fe51a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -583,11 +583,11 @@ private[hive] class HiveQl(conf: ParserConf) extends SparkQl(conf) with Logging
val (output, schemaLess) = outputClause match {
case Token("TOK_ALIASLIST", aliases) :: Nil =>
- (aliases.map { case Token(name, Nil) => AttributeReference(name, StringType)() },
- false)
+ (aliases.map { case Token(name, Nil) =>
+ AttributeReference(cleanIdentifier(name), StringType)() }, false)
case Token("TOK_TABCOLLIST", attributes) :: Nil =>
(attributes.map { case Token("TOK_TABCOL", Token(name, Nil) :: dataType :: Nil) =>
- AttributeReference(name, nodeToDataType(dataType))() }, false)
+ AttributeReference(cleanIdentifier(name), nodeToDataType(dataType))() }, false)
case Nil =>
(List(AttributeReference("key", StringType)(),
AttributeReference("value", StringType)()), true)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala
index e869c0e2bd..efd33f5941 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala
@@ -200,4 +200,12 @@ class HiveQlSuite extends SparkFunSuite with BeforeAndAfterAll {
assert(plan.children.head.asInstanceOf[Generate].generator.isInstanceOf[JsonTuple])
}
+
+ test("use backticks in output of Script Transform") {
+ val plan = parser.parsePlan(
+ """SELECT `t`.`thing1`
+ |FROM (SELECT TRANSFORM (`parquet_t1`.`key`, `parquet_t1`.`value`)
+ |USING 'cat' AS (`thing1` int, `thing2` string) FROM `default`.`parquet_t1`) AS t
+ """.stripMargin)
+ }
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index ff1719eaf6..e478bcd0ed 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -731,7 +731,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
data.toDF("key", "value").registerTempTable("test")
checkAnswer(
sql("""FROM
- |(FROM test SELECT TRANSFORM(key, value) USING 'cat' AS (thing1 int, thing2 string)) t
+ |(FROM test SELECT TRANSFORM(key, value) USING 'cat' AS (`thing1` int, thing2 string)) t
|SELECT thing1 + 1
""".stripMargin), (2 to 6).map(i => Row(i)))
}