From af32f4aed650ba7acb381b98f3487e889e96f8c9 Mon Sep 17 00:00:00 2001
From: Yin Huai
Date: Fri, 29 Apr 2016 14:54:40 -0700
Subject: [SPARK-15013][SQL] Remove hiveConf from HiveSessionState

## What changes were proposed in this pull request?

The hiveConf in HiveSessionState is not actually used anymore. Let's remove it.

## How was this patch tested?

Existing tests.

Author: Yin Huai

Closes #12786 from yhuai/removeHiveConf.
---
 .../apache/spark/sql/hive/HiveSessionState.scala   | 26 ----------------------
 .../org/apache/spark/sql/hive/test/TestHive.scala  |  5 +----
 2 files changed, 1 insertion(+), 30 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
index 9608f0b4ef..b17a88b2ef 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
@@ -45,31 +45,6 @@ private[hive] class HiveSessionState(sparkSession: SparkSession)
    */
   lazy val metadataHive: HiveClient = sharedState.metadataHive.newSession()
 
-  /**
-   * SQLConf and HiveConf contracts:
-   *
-   * 1. create a new o.a.h.hive.ql.session.SessionState for each HiveContext
-   * 2. when the Hive session is first initialized, params in HiveConf will get picked up by the
-   *    SQLConf. Additionally, any properties set by set() or a SET command inside sql() will be
-   *    set in the SQLConf *as well as* in the HiveConf.
-   */
-  lazy val hiveconf: HiveConf = {
-    val initialConf = new HiveConf(
-      sparkSession.sparkContext.hadoopConfiguration,
-      classOf[org.apache.hadoop.hive.ql.session.SessionState])
-
-    // HiveConf is a Hadoop Configuration, which has a field of classLoader and
-    // the initial value will be the current thread's context class loader
-    // (i.e. initClassLoader at here).
-    // We call initialConf.setClassLoader(initClassLoader) at here to make
-    // this action explicit.
-    initialConf.setClassLoader(sparkSession.sharedState.jarClassLoader)
-    sparkSession.sparkContext.conf.getAll.foreach { case (k, v) =>
-      initialConf.set(k, v)
-    }
-    initialConf
-  }
-
   setDefaultOverrideConfs()
 
   /**
@@ -145,7 +120,6 @@ private[hive] class HiveSessionState(sparkSession: SparkSession)
   override def setConf(key: String, value: String): Unit = {
     super.setConf(key, value)
     metadataHive.runSqlHive(s"SET $key=$value")
-    hiveconf.set(key, value)
   }
 
   override def addJar(path: String): Unit = {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
index c4a3a74b9b..e763b63380 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -153,9 +153,6 @@ private[hive] class TestHiveSparkSession(
   // By clearing the port we force Spark to pick a new one. This allows us to rerun tests
   // without restarting the JVM.
   System.clearProperty("spark.hostPort")
-  CommandProcessorFactory.clean(sessionState.hiveconf)
-
-  sessionState.hiveconf.set("hive.plan.serialization.format", "javaXML")
 
   // For some hive test case which contain ${system:test.tmp.dir}
   System.setProperty("test.tmp.dir", Utils.createTempDir().getCanonicalPath)
@@ -423,7 +420,7 @@ private[hive] class TestHiveSparkSession(
       foreach { udfName => FunctionRegistry.unregisterTemporaryUDF(udfName) }
 
     // Some tests corrupt this value on purpose, which breaks the RESET call below.
-    sessionState.hiveconf.set("fs.default.name", new File(".").toURI.toString)
+    sessionState.conf.setConfString("fs.default.name", new File(".").toURI.toString)
     // It is important that we RESET first as broken hooks that might have been set could break
     // other sql exec here.
     sessionState.metadataHive.runSqlHive("RESET")
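
With `hiveconf` removed, the only per-session record of a setting is `SQLConf`: `HiveSessionState.setConf` now just updates `SQLConf` (via `super.setConf`) and forwards the setting to the Hive metastore client as a `SET` command, instead of also mirroring it into a separate `HiveConf`. The following is a minimal, self-contained sketch of that single-store-plus-forwarding shape, not Spark's actual code; `SessionStateSketch` and `MetadataHiveClient` are invented names standing in for `HiveSessionState` and `HiveClient`.

```scala
import scala.collection.mutable

// Hypothetical stand-in for the metastore client; only the SET-forwarding
// path (runSqlHive in the real HiveClient) is modeled here.
trait MetadataHiveClient {
  def runSqlHive(sql: String): Unit
}

class SessionStateSketch(metadataHive: MetadataHiveClient) {
  // Single source of truth for session settings (stands in for SQLConf).
  private val settings = mutable.Map.empty[String, String]

  def setConf(key: String, value: String): Unit = {
    settings(key) = value                        // record the setting once
    metadataHive.runSqlHive(s"SET $key=$value")  // keep the Hive session in step
    // Note: no second hiveconf.set(key, value) mirror call, as in the patch.
  }

  def getConf(key: String): Option[String] = settings.get(key)
}

object SessionStateSketchDemo extends App {
  val client = new MetadataHiveClient {
    def runSqlHive(sql: String): Unit = println(s"runSqlHive: $sql")
  }
  val state = new SessionStateSketch(client)
  state.setConf("fs.default.name", new java.io.File(".").toURI.toString)
  assert(state.getConf("fs.default.name").isDefined)
}
```

Keeping a single store means there is no second copy of the configuration that can drift out of sync, which is why the `hiveconf.set(key, value)` mirror call in `setConf` could be deleted along with the `hiveconf` field itself.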