aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--project/SparkBuild.scala24
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala3
2 files changed, 26 insertions, 1 deletion
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 754d54e893..5461d25d72 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -166,6 +166,9 @@ object SparkBuild extends PomBuild {
/* Enable unidoc only for the root spark project */
enable(Unidoc.settings)(spark)
+ /* Spark SQL Core console settings */
+ enable(SQL.settings)(sql)
+
/* Hive console settings */
enable(Hive.settings)(hive)
@@ -179,6 +182,27 @@ object SparkBuild extends PomBuild {
}
+object SQL {
+
+ lazy val settings = Seq(
+
+ initialCommands in console :=
+ """
+ |import org.apache.spark.sql.catalyst.analysis._
+ |import org.apache.spark.sql.catalyst.dsl._
+ |import org.apache.spark.sql.catalyst.errors._
+ |import org.apache.spark.sql.catalyst.expressions._
+ |import org.apache.spark.sql.catalyst.plans.logical._
+ |import org.apache.spark.sql.catalyst.rules._
+ |import org.apache.spark.sql.catalyst.types._
+ |import org.apache.spark.sql.catalyst.util._
+ |import org.apache.spark.sql.execution
+ |import org.apache.spark.sql.test.TestSQLContext._
+ |import org.apache.spark.sql.parquet.ParquetTestData""".stripMargin
+ )
+
+}
+
object Hive {
lazy val settings = Seq(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
index 0d05d9808b..616f1e2ecb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
@@ -113,11 +113,12 @@ trait OverrideCatalog extends Catalog {
alias: Option[String] = None): LogicalPlan = {
val (dbName, tblName) = processDatabaseAndTableName(databaseName, tableName)
val overriddenTable = overrides.get((dbName, tblName))
+ val tableWithQualifers = overriddenTable.map(r => Subquery(tblName, r))
// If an alias was specified by the lookup, wrap the plan in a subquery so that attributes are
// properly qualified with this alias.
val withAlias =
- overriddenTable.map(r => alias.map(a => Subquery(a, r)).getOrElse(r))
+ tableWithQualifers.map(r => alias.map(a => Subquery(a, r)).getOrElse(r))
withAlias.getOrElse(super.lookupRelation(dbName, tblName, alias))
}