diff options
author     Michael Armbrust <michael@databricks.com>  2014-06-12 23:09:41 -0700
committer  Michael Armbrust <michael@databricks.com>  2014-06-12 23:10:08 -0700
commit     57526e40a52323ffccfc79193c04eccdc60e4653 (patch)
tree       8ecb76dff2a1f1f5721a9ef203ddc46b7e644f5e /sql/hive/src/main
parent     3962abaf93217eced5856d28ad6dc02f8b653e98 (diff)
download   spark-57526e40a52323ffccfc79193c04eccdc60e4653.tar.gz
           spark-57526e40a52323ffccfc79193c04eccdc60e4653.tar.bz2
           spark-57526e40a52323ffccfc79193c04eccdc60e4653.zip
[SPARK-2135][SQL] Use planner for in-memory scans
Author: Michael Armbrust <michael@databricks.com>
Closes #1072 from marmbrus/cachedStars and squashes the following commits:
8757c8e [Michael Armbrust] Use planner for in-memory scans.
(cherry picked from commit 13f8cfdc04589b986554310965e83fe658085683)
Signed-off-by: Michael Armbrust <michael@databricks.com>
Diffstat (limited to 'sql/hive/src/main')
3 files changed, 9 insertions, 6 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala index 6497821554..9cd13f6ae0 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala @@ -230,6 +230,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) { CommandStrategy(self), TakeOrdered, ParquetOperations, + InMemoryScans, HiveTableScans, DataSinks, Scripts, diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala index a91b520765..e9e6497f7e 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala @@ -36,7 +36,7 @@ import org.apache.spark.sql.catalyst.rules._ import org.apache.spark.sql.catalyst.types._ import org.apache.spark.sql.execution.SparkLogicalPlan import org.apache.spark.sql.hive.execution.{HiveTableScan, InsertIntoHiveTable} -import org.apache.spark.sql.columnar.InMemoryColumnarTableScan +import org.apache.spark.sql.columnar.{InMemoryRelation, InMemoryColumnarTableScan} /* Implicit conversions */ import scala.collection.JavaConversions._ @@ -130,8 +130,9 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with case p @ InsertIntoTable(table: MetastoreRelation, _, child, _) => castChildOutput(p, table, child) - case p @ logical.InsertIntoTable(SparkLogicalPlan(InMemoryColumnarTableScan( - _, HiveTableScan(_, table, _), _)), _, child, _) => + case p @ logical.InsertIntoTable( + InMemoryRelation(_, _, + HiveTableScan(_, table, _)), _, child, _) => castChildOutput(p, table, child) } diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala index 
b2157074a4..d1aa8c868c 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala @@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.plans._ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan import org.apache.spark.sql.execution._ import org.apache.spark.sql.hive.execution._ -import org.apache.spark.sql.columnar.InMemoryColumnarTableScan +import org.apache.spark.sql.columnar.InMemoryRelation private[hive] trait HiveStrategies { // Possibly being too clever with types here... or not clever enough. @@ -44,8 +44,9 @@ private[hive] trait HiveStrategies { def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match { case logical.InsertIntoTable(table: MetastoreRelation, partition, child, overwrite) => InsertIntoHiveTable(table, partition, planLater(child), overwrite)(hiveContext) :: Nil - case logical.InsertIntoTable(SparkLogicalPlan(InMemoryColumnarTableScan( - _, HiveTableScan(_, table, _), _)), partition, child, overwrite) => + case logical.InsertIntoTable( + InMemoryRelation(_, _, + HiveTableScan(_, table, _)), partition, child, overwrite) => InsertIntoHiveTable(table, partition, planLater(child), overwrite)(hiveContext) :: Nil case _ => Nil } |