path: root/sql/hive/v0.12.0
author     jeanlyn <jeanlyn92@gmail.com>               2015-03-25 17:47:45 -0700
committer  Michael Armbrust <michael@databricks.com>   2015-03-25 17:47:45 -0700
commit     e6d1406abd55bc24477eb8c6ee72c31e7110435e (patch)
tree       08e906ee3236109118a2815160d85cb4b87aa0c9 /sql/hive/v0.12.0
parent     8c3b0052f4792d97d23244ade335676e37cb1fae (diff)
[SPARK-5498][SQL] Fix query exception when partition schema does not match table schema
In Hive, the schema of a partition may differ from the schema of its table. When we use spark-sql to query a partition whose schema differs from the table schema, we get the exceptions described in the [jira](https://issues.apache.org/jira/browse/SPARK-5498). For example:

* Take a look at the schema of the partition and of the table:

```sql
DESCRIBE partition_test PARTITION (dt='1');
id      int     None
name    string  None
dt      string  None
# Partition Information
# col_name      data_type       comment
dt      string  None
```

```sql
DESCRIBE partition_test;
OK
id      bigint  None
name    string  None
dt      string  None
# Partition Information
# col_name      data_type       comment
dt      string  None
```

* Run the query:

```sql
SELECT * FROM partition_test WHERE dt='1';
```

We get the cast exception `java.lang.ClassCastException: org.apache.spark.sql.catalyst.expressions.MutableLong cannot be cast to org.apache.spark.sql.catalyst.expressions.MutableInt`.

Author: jeanlyn <jeanlyn92@gmail.com>

Closes #4289 from jeanlyn/schema and squashes the following commits:

9c8da74 [jeanlyn] fix style
b41d6b9 [jeanlyn] fix compile errors
07d84b6 [jeanlyn] Merge branch 'master' into schema
535b0b6 [jeanlyn] reduce conflicts
d6c93c5 [jeanlyn] fix bug
1e8b30c [jeanlyn] fix code style
0549759 [jeanlyn] fix code style
c879aa1 [jeanlyn] clean the code
2a91a87 [jeanlyn] add more test case and clean the code
12d800d [jeanlyn] fix code style
63d170a [jeanlyn] fix compile problem
7470901 [jeanlyn] reduce conflicts
afc7da5 [jeanlyn] make getConvertedOI compatible between 0.12.0 and 0.13.1
b1527d5 [jeanlyn] fix type mismatch
10744ca [jeanlyn] Insert a space after the start of the comment
3b27af3 [jeanlyn] SPARK-5498:fix bug when query the data when partition schema does not match table schema
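Note on the failure mode: the partition's deserializer exposes an ObjectInspector for the partition-level column type (here `int`), while Spark's mutable row is built from the table-level type (here `bigint`), so the raw value lands in a slot of the wrong mutable type. Below is a minimal, self-contained sketch of the Hive-side conversion this patch builds on; it is not part of the patch. The `ObjectInspectorConverters.getConvertedOI(inputOI, outputOI, true)` call mirrors what the diff adds, and the inspector factories and `getConverter` usage are standard Hive serde2 APIs used here only for illustration.

```scala
// Standalone sketch (not from the patch): bridging a partition column written
// as int to a table column declared as bigint via Hive ObjectInspectors.
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
import org.apache.hadoop.io.{IntWritable, LongWritable}

object PartitionSchemaMismatchSketch {
  def main(args: Array[String]): Unit = {
    // Inspector for the data as it was written into the partition (int) ...
    val partitionOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector
    // ... and for the schema the table now declares (bigint).
    val tableOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector

    // The v0.12.0 shim added by this patch wraps this call (equalsCheck = true).
    val convertedOI = ObjectInspectorConverters.getConvertedOI(partitionOI, tableOI, true)

    // A converter derived from the two inspectors widens int -> bigint instead of
    // letting the raw IntWritable reach a row slot that expects a long.
    val converter = ObjectInspectorConverters.getConverter(partitionOI, convertedOI)
    val widened = converter.convert(new IntWritable(42))
    println(widened.asInstanceOf[LongWritable].get()) // prints 42
  }
}
```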
Diffstat (limited to 'sql/hive/v0.12.0')
-rw-r--r--  sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala  10
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
index 30646ddbc2..0ed93c2c5b 100644
--- a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
+++ b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
@@ -34,7 +34,7 @@ import org.apache.hadoop.hive.ql.plan.{CreateTableDesc, FileSinkDesc, TableDesc}
import org.apache.hadoop.hive.ql.processors._
import org.apache.hadoop.hive.ql.stats.StatsSetupConst
import org.apache.hadoop.hive.serde2.{ColumnProjectionUtils, Deserializer, io => hiveIo}
-import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, PrimitiveObjectInspector}
+import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspectorConverters, ObjectInspector, PrimitiveObjectInspector}
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory
import org.apache.hadoop.hive.serde2.objectinspector.primitive.{HiveDecimalObjectInspector, PrimitiveObjectInspectorFactory}
import org.apache.hadoop.hive.serde2.typeinfo.{TypeInfo, TypeInfoFactory}
@@ -210,7 +210,7 @@ private[hive] object HiveShim {
def getDataLocationPath(p: Partition) = p.getPartitionPath
- def getAllPartitionsOf(client: Hive, tbl: Table) = client.getAllPartitionsForPruner(tbl)
+ def getAllPartitionsOf(client: Hive, tbl: Table) = client.getAllPartitionsForPruner(tbl)
def compatibilityBlackList = Seq(
"decimal_.*",
@@ -244,6 +244,12 @@ private[hive] object HiveShim {
}
}
+ def getConvertedOI(
+ inputOI: ObjectInspector,
+ outputOI: ObjectInspector): ObjectInspector = {
+ ObjectInspectorConverters.getConvertedOI(inputOI, outputOI, true)
+ }
+
def prepareWritable(w: Writable): Writable = {
w
}
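The squashed commit "make getConvertedOI compatible between 0.12.0 and 0.13.1" refers to hiding a Hive API difference behind this shim: the three-argument `getConvertedOI(inputOI, outputOI, equalsCheck)` overload used above exists in Hive 0.12.0, while the v0.13.1 shim lives outside this diffstat and would presumably delegate to a two-argument overload. The sketch below is an assumption about that counterpart, with an illustrative object name, not code taken from this diff.

```scala
// Assumed v0.13.1 counterpart (not shown by this diff): same Spark-facing
// signature, delegating to the two-argument Hive overload, which does not take
// the equalsCheck flag passed for 0.12.0 above.
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, ObjectInspectorConverters}

private[hive] object Shim13Sketch {
  def getConvertedOI(inputOI: ObjectInspector, outputOI: ObjectInspector): ObjectInspector = {
    ObjectInspectorConverters.getConvertedOI(inputOI, outputOI)
  }
}
```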