aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAlex Liu <alex_liu68@yahoo.com>2014-08-20 16:14:06 -0700
committerMichael Armbrust <michael@databricks.com>2014-08-20 16:14:06 -0700
commitd9e94146a6e65be110a62e3bd0351148912a41d1 (patch)
tree85ce735e7505b25f557c97a814fa744eb38e30a4
parenta1e8b1bc973bc0517681c09e5a5a475c0f395d31 (diff)
downloadspark-d9e94146a6e65be110a62e3bd0351148912a41d1.tar.gz
spark-d9e94146a6e65be110a62e3bd0351148912a41d1.tar.bz2
spark-d9e94146a6e65be110a62e3bd0351148912a41d1.zip
[SPARK-2846][SQL] Add configureInputJobPropertiesForStorageHandler to initialization of job conf
Add configureInputJobPropertiesForStorageHandler to initial job conf. Author: Alex Liu <alex_liu68@yahoo.com> Closes #1927 from alexliu68/SPARK-SQL-2846 and squashes the following commits: e4bdc4c [Alex Liu] SPARK-SQL-2846 add configureInputJobPropertiesForStorageHandler to initial job conf
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala3
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
index 82c88280d7..329f80cad4 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
@@ -22,7 +22,7 @@ import org.apache.hadoop.fs.{Path, PathFilter}
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants._
import org.apache.hadoop.hive.ql.exec.Utilities
import org.apache.hadoop.hive.ql.metadata.{Partition => HivePartition, Table => HiveTable}
-import org.apache.hadoop.hive.ql.plan.TableDesc
+import org.apache.hadoop.hive.ql.plan.{PlanUtils, TableDesc}
import org.apache.hadoop.hive.serde2.Deserializer
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector
@@ -249,6 +249,7 @@ private[hive] object HadoopTableReader extends HiveInspectors {
def initializeLocalJobConfFunc(path: String, tableDesc: TableDesc)(jobConf: JobConf) {
FileInputFormat.setInputPaths(jobConf, path)
if (tableDesc != null) {
+ PlanUtils.configureInputJobPropertiesForStorageHandler(tableDesc)
Utilities.copyTableJobPropertiesToConf(tableDesc, jobConf)
}
val bufferSize = System.getProperty("spark.buffer.size", "65536")