Diffstat (limited to 'sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala  36
1 file changed, 14 insertions(+), 22 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala
index e5f02caabc..69e3358d4e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala
@@ -44,14 +44,19 @@ private[sql] class SessionState(ctx: SQLContext) {
lazy val experimentalMethods = new ExperimentalMethods
/**
- * Internal catalog for managing table and database states.
+ * Internal catalog for managing functions registered by the user.
*/
- lazy val catalog = new SessionCatalog(ctx.externalCatalog, conf)
+ lazy val functionRegistry: FunctionRegistry = FunctionRegistry.builtin.copy()
/**
- * Internal catalog for managing functions registered by the user.
+ * Internal catalog for managing table and database states.
*/
- lazy val functionRegistry: FunctionRegistry = FunctionRegistry.builtin.copy()
+ lazy val catalog =
+ new SessionCatalog(
+ ctx.externalCatalog,
+ ctx.functionResourceLoader,
+ functionRegistry,
+ conf)
/**
* Interface exposed to the user for registering user-defined functions.
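The swap above is more than cosmetic housekeeping: SessionCatalog's constructor now takes the functionRegistry (and the functionResourceLoader), so the registry reads naturally before the catalog that consumes it. Since both are lazy vals, initialization is driven by reference rather than declaration order; a minimal, self-contained sketch of that behavior (hypothetical Registry/Catalog stand-ins, not Spark's classes):

    // Hypothetical stand-ins for FunctionRegistry and SessionCatalog.
    class Registry
    class Catalog(val registry: Registry)

    class State {
      // A lazy val initializes on first reference, not in declaration order.
      lazy val registry: Registry = new Registry
      // Accessing `catalog` forces `registry` to initialize first.
      lazy val catalog: Catalog = new Catalog(registry)
    }

    object LazyInitDemo extends App {
      val s = new State
      println(s.catalog.registry) // registry is initialized on demand
    }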
@@ -62,9 +67,8 @@ private[sql] class SessionState(ctx: SQLContext) {
* Logical query plan analyzer for resolving unresolved attributes and relations.
*/
lazy val analyzer: Analyzer = {
- new Analyzer(catalog, functionRegistry, conf) {
+ new Analyzer(catalog, conf) {
override val extendedResolutionRules =
- python.ExtractPythonUDFs ::
PreInsertCastAndRename ::
DataSourceAnalysis ::
(if (conf.runSQLOnFile) new ResolveDataSource(ctx) :: Nil else Nil)
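With the function registry folded into the catalog, the Analyzer constructor drops its functionRegistry parameter, and python.ExtractPythonUDFs leaves the extended resolution rules. The anonymous subclass overriding a val, as above, is a common Scala extension hook; a self-contained sketch of that pattern (illustrative names, not Spark's Analyzer):

    // Illustrative extension-hook pattern, not Spark's actual classes.
    trait Rule { def name: String }
    object PreInsertCastAndRename extends Rule { val name = "PreInsertCastAndRename" }
    object DataSourceAnalysis extends Rule { val name = "DataSourceAnalysis" }

    abstract class BaseAnalyzer {
      // Hook point: empty by default, overridden per session.
      val extendedResolutionRules: Seq[Rule] = Nil
    }

    object RuleHookDemo extends App {
      val analyzer = new BaseAnalyzer {
        override val extendedResolutionRules =
          PreInsertCastAndRename :: DataSourceAnalysis :: Nil
      }
      analyzer.extendedResolutionRules.foreach(r => println(r.name))
    }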
@@ -81,25 +85,13 @@ private[sql] class SessionState(ctx: SQLContext) {
/**
* Parser that extracts expressions, plans, table identifiers etc. from SQL texts.
*/
- lazy val sqlParser: ParserInterface = new SparkQl(conf)
+ lazy val sqlParser: ParserInterface = SparkSqlParser
/**
* Planner that converts optimized logical plans to physical plans.
*/
- lazy val planner: SparkPlanner = new SparkPlanner(ctx.sparkContext, conf, experimentalMethods)
-
- /**
- * Prepares a planned [[SparkPlan]] for execution by inserting shuffle operations and internal
- * row format conversions as needed.
- */
- lazy val prepareForExecution = new RuleExecutor[SparkPlan] {
- override val batches: Seq[Batch] = Seq(
- Batch("Subquery", Once, PlanSubqueries(SessionState.this)),
- Batch("Add exchange", Once, EnsureRequirements(conf)),
- Batch("Whole stage codegen", Once, CollapseCodegenStages(conf)),
- Batch("Reuse duplicated exchanges", Once, ReuseExchange(conf))
- )
- }
+ def planner: SparkPlanner =
+ new SparkPlanner(ctx.sparkContext, conf, experimentalMethods.extraStrategies)
/**
* An interface to register custom [[org.apache.spark.sql.util.QueryExecutionListener]]s
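Turning planner from a lazy val into a def means a fresh SparkPlanner is built on every access, so strategies appended to experimentalMethods.extraStrategies after the session exists still take effect. A minimal sketch of the difference, with stand-in types rather than Spark's:

    import scala.collection.mutable.ArrayBuffer

    // Stand-ins: a mutable strategy list like experimentalMethods.extraStrategies.
    class Experimental { val extraStrategies = ArrayBuffer.empty[String] }
    class Planner(val strategies: Seq[String])

    class State {
      val experimental = new Experimental
      // A lazy val would snapshot extraStrategies on first access;
      // a def re-reads the mutable list on every call.
      def planner: Planner = new Planner(experimental.extraStrategies.toList)
    }

    object PlannerDemo extends App {
      val s = new State
      println(s.planner.strategies)             // List()
      s.experimental.extraStrategies += "MyStrategy"
      println(s.planner.strategies)             // List(MyStrategy)
    }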
@@ -111,5 +103,5 @@ private[sql] class SessionState(ctx: SQLContext) {
* Interface to start and stop [[org.apache.spark.sql.ContinuousQuery]]s.
*/
lazy val continuousQueryManager: ContinuousQueryManager = new ContinuousQueryManager(ctx)
-
}
+
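For reference, the prepareForExecution block deleted above followed the RuleExecutor pattern: named batches of rules folded over a physical plan, each applied once. The diff does not show where that preparation logic lands, only that SessionState no longer owns it. A self-contained sketch of the pattern itself (stand-in types, not Spark's RuleExecutor):

    // Stand-in types illustrating the batches-of-rules pattern,
    // not Spark's actual RuleExecutor.
    trait Rule[T] { def apply(plan: T): T }
    case class Batch[T](name: String, rules: Rule[T]*)

    class Executor[T](batches: Seq[Batch[T]]) {
      // Apply every rule of every batch once, in order ("Once" strategy).
      def execute(plan: T): T =
        batches.foldLeft(plan) { (p, batch) =>
          batch.rules.foldLeft(p)((acc, rule) => rule(acc))
        }
    }

    object ExecutorDemo extends App {
      val upper = new Rule[String] { def apply(plan: String) = plan.toUpperCase }
      val exec  = new Executor(Seq(Batch("Demo", upper)))
      println(exec.execute("physical plan"))    // PHYSICAL PLAN
    }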