From 8dc3987d095ae01ad80c89b8f052f231e0807990 Mon Sep 17 00:00:00 2001
From: Reynold Xin
Date: Sat, 30 Apr 2016 01:32:00 -0700
Subject: [SPARK-15028][SQL] Remove HiveSessionState.setDefaultOverrideConfs

## What changes were proposed in this pull request?

This patch removes some code that is no longer relevant -- mainly
HiveSessionState.setDefaultOverrideConfs.

## How was this patch tested?

N/A

Author: Reynold Xin

Closes #12806 from rxin/SPARK-15028.
---
 .../main/scala/org/apache/spark/SparkConf.scala    |  2 +-
 .../scala/org/apache/spark/sql/SQLContext.scala    | 23 ++++------------------
 .../apache/spark/sql/execution/debug/package.scala |  6 +++---
 .../org/apache/spark/sql/DataFrameSuite.scala      | 15 --------------
 .../scala/org/apache/spark/sql/SQLQuerySuite.scala |  6 +-----
 .../apache/spark/sql/hive/HiveSessionState.scala   | 10 ----------
 .../org/apache/spark/sql/hive/test/TestHive.scala  |  3 ---
 7 files changed, 9 insertions(+), 56 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index e671a3e95a..33ed0d5493 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -455,7 +455,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     }
 
     // Validate spark.executor.extraJavaOptions
-    getOption(executorOptsKey).map { javaOpts =>
+    getOption(executorOptsKey).foreach { javaOpts =>
       if (javaOpts.contains("-Dspark")) {
         val msg = s"$executorOptsKey is not allowed to set Spark options (was '$javaOpts'). " +
           "Set them directly on a SparkConf or in a properties file when using ./bin/spark-submit."
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index ff633cf837..168ac7e04b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -44,8 +44,10 @@ import org.apache.spark.sql.types._
 import org.apache.spark.sql.util.ExecutionListenerManager
 
 /**
- * The entry point for working with structured data (rows and columns) in Spark. Allows the
- * creation of [[DataFrame]] objects as well as the execution of SQL queries.
+ * The entry point for working with structured data (rows and columns) in Spark, in Spark 1.x.
+ *
+ * As of Spark 2.0, this is replaced by [[SparkSession]]. However, we are keeping the class here
+ * for backward compatibility.
  *
  * @groupname basic Basic Operations
  * @groupname ddl_ops Persistent Catalog DDL
@@ -165,23 +167,6 @@ class SQLContext private[sql](
     sparkSession.conf.get(key)
   }
 
-  /**
-   * Return the value of Spark SQL configuration property for the given key. If the key is not set
-   * yet, return `defaultValue` in [[ConfigEntry]].
-   */
-  private[sql] def getConf[T](entry: ConfigEntry[T]): T = {
-    sparkSession.conf.get(entry)
-  }
-
-  /**
-   * Return the value of Spark SQL configuration property for the given key. If the key is not set
-   * yet, return `defaultValue`. This is useful when `defaultValue` in ConfigEntry is not the
-   * desired one.
-   */
-  private[sql] def getConf[T](entry: ConfigEntry[T], defaultValue: T): T = {
-    sparkSession.conf.get(entry, defaultValue)
-  }
-
   /**
    * Return the value of Spark SQL configuration property for the given key. If the key is not set
    * yet, return `defaultValue`.
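The `SparkConf` hunk above is a pure cleanup: the closure passed to `getOption(executorOptsKey)` runs only for its validation side effect, so `Option.foreach` is the idiomatic call; `map` would allocate an `Option[Unit]` that is immediately discarded. Below is a minimal, self-contained sketch of the same idiom, with a plain `Map` standing in for `SparkConf` (the object name and sample values are illustrative, not from the patch):

```scala
// Validate an optional config value purely for its side effect.
// `foreach` runs the block only when the key is present and returns Unit;
// `map` would wrap the result in an Option[Unit] that nobody reads.
object ExtraJavaOptionsCheck {
  def main(args: Array[String]): Unit = {
    val conf = Map("spark.executor.extraJavaOptions" -> "-verbose:gc -Xmx2g")

    conf.get("spark.executor.extraJavaOptions").foreach { javaOpts =>
      if (javaOpts.contains("-Dspark")) {
        throw new IllegalArgumentException(
          s"spark.executor.extraJavaOptions must not set Spark options (was '$javaOpts'); " +
            "set them on a SparkConf or in a properties file instead.")
      }
    }
    println("extraJavaOptions OK")
  }
}
```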
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala
index 5b96ab10c9..c77c889a1b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala
@@ -68,11 +68,11 @@ package object debug {
   }
 
   /**
-   * Augments [[SQLContext]] with debug methods.
+   * Augments [[SparkSession]] with debug methods.
    */
-  implicit class DebugSQLContext(sqlContext: SQLContext) {
+  implicit class DebugSQLContext(sparkSession: SparkSession) {
     def debug(): Unit = {
-      sqlContext.setConf(SQLConf.DATAFRAME_EAGER_ANALYSIS, false)
+      sparkSession.conf.set(SQLConf.DATAFRAME_EAGER_ANALYSIS.key, false)
     }
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index f10d8372ed..80a93ee6d4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -66,21 +66,6 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
       Row(1, 1) :: Nil)
   }
 
-  ignore("invalid plan toString, debug mode") {
-    // Turn on debug mode so we can see invalid query plans.
-    import org.apache.spark.sql.execution.debug._
-
-    withSQLConf(SQLConf.DATAFRAME_EAGER_ANALYSIS.key -> "true") {
-      sqlContext.debug()
-
-      val badPlan = testData.select('badColumn)
-
-      assert(badPlan.toString contains badPlan.queryExecution.toString,
-        "toString on bad query plans should include the query execution but was:\n" +
-        badPlan.toString)
-    }
-  }
-
   test("access complex data") {
     assert(complexData.filter(complexData("a").getItem(0) === 2).count() == 1)
     assert(complexData.filter(complexData("m").getItem("1") === 1).count() == 1)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 5065e5b80b..ec5163b658 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -1495,15 +1495,11 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-4699 case sensitivity SQL query") {
-    val orig = sqlContext.getConf(SQLConf.CASE_SENSITIVE)
-    try {
-      sqlContext.setConf(SQLConf.CASE_SENSITIVE, false)
+    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
       val data = TestData(1, "val_1") :: TestData(2, "val_2") :: Nil
       val rdd = sparkContext.parallelize((0 to 1).map(i => data(i)))
       rdd.toDF().registerTempTable("testTable1")
       checkAnswer(sql("SELECT VALUE FROM TESTTABLE1 where KEY = 1"), Row("val_1"))
-    } finally {
-      sqlContext.setConf(SQLConf.CASE_SENSITIVE, orig)
     }
   }
 
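The `SQLQuerySuite` change just above replaces a hand-rolled save/set/restore `try`/`finally` with the `withSQLConf` helper from Spark's test utilities, which restores the prior values even when the body throws and correctly handles keys that were previously unset. A simplified sketch of how such a helper can be written (an illustration of the pattern, not Spark's actual `SQLTestUtils` implementation):

```scala
// Sketch of a set-then-restore config scope in the spirit of withSQLConf.
// A mutable Map stands in for the real SQLConf.
import scala.collection.mutable

object WithConfSketch {
  private val conf = mutable.Map[String, String]()

  def withConf(pairs: (String, String)*)(body: => Unit): Unit = {
    // Remember the previous value of every key, including "was not set".
    val saved = pairs.map { case (k, _) => k -> conf.get(k) }
    pairs.foreach { case (k, v) => conf(k) = v }
    try body
    finally saved.foreach {
      case (k, Some(old)) => conf(k) = old   // restore the old value
      case (k, None)      => conf.remove(k)  // the key was unset before
    }
  }

  def main(args: Array[String]): Unit = {
    withConf("spark.sql.caseSensitive" -> "false") {
      assert(conf("spark.sql.caseSensitive") == "false")
    }
    assert(!conf.contains("spark.sql.caseSensitive")) // restored to unset
  }
}
```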
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
index f3076912cb..57aa4b2931 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
@@ -44,8 +44,6 @@ private[hive] class HiveSessionState(sparkSession: SparkSession)
    */
   lazy val metadataHive: HiveClient = sharedState.metadataHive.newSession()
 
-  setDefaultOverrideConfs()
-
   /**
    * Internal catalog for managing table and database states.
    */
@@ -108,14 +106,6 @@ private[hive] class HiveSessionState(sparkSession: SparkSession)
   // Helper methods, partially leftover from pre-2.0 days
   // ------------------------------------------------------
 
-  /**
-   * Overrides default Hive configurations to avoid breaking changes to Spark SQL users.
-   *  - allow SQL11 keywords to be used as identifiers
-   */
-  def setDefaultOverrideConfs(): Unit = {
-    conf.setConfString(ConfVars.HIVE_SUPPORT_SQL11_RESERVED_KEYWORDS.varname, "false")
-  }
-
   override def addJar(path: String): Unit = {
     metadataHive.addJar(path)
     super.addJar(path)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
index 93646a45a2..b41d882ffa 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -432,9 +432,6 @@ private[hive] class TestHiveSparkSession(
       // Lots of tests fail if we do not change the partition whitelist from the default.
       sessionState.metadataHive.runSqlHive("set hive.metastore.partition.name.whitelist.pattern=.*")
 
-      // In case a test changed any of these values, restore all the original ones here.
-      sessionState.setDefaultOverrideConfs()
-
       sessionState.catalog.setCurrentDatabase("default")
     } catch {
       case e: Exception =>
--
cgit v1.2.3
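With `setDefaultOverrideConfs` removed, Spark no longer force-disables Hive's SQL11 reserved-keywords enforcement on every Hive session. A deployment that still relied on that behavior can apply the setting itself through the Spark 2.0 runtime config API. A sketch, assuming the key behind `ConfVars.HIVE_SUPPORT_SQL11_RESERVED_KEYWORDS` is `hive.support.sql11.reserved.keywords` (true for contemporary Hive releases, but worth verifying against the Hive version on the classpath):

```scala
import org.apache.spark.sql.SparkSession

// Sketch: re-applying the removed override by hand. The conf key is the
// varname behind ConfVars.HIVE_SUPPORT_SQL11_RESERVED_KEYWORDS; verify it
// against the Hive version actually in use.
object RestoreSql11KeywordOverride {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("restore-sql11-keyword-override")
      .enableHiveSupport()
      .getOrCreate()

    // Allow SQL11 reserved keywords to be used as identifiers, as the
    // removed HiveSessionState.setDefaultOverrideConfs used to do.
    spark.conf.set("hive.support.sql11.reserved.keywords", "false")

    spark.stop()
  }
}
```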