about summary refs log tree commit diff
path: root/sql/hive
diff options
context:
space:
mode:
authorReynold Xin <rxin@databricks.com>2017-02-07 18:55:19 +0100
committerReynold Xin <rxin@databricks.com>2017-02-07 18:55:19 +0100
commitb7277e03d1038e2a19495c0ef7707e2d77937ccf (patch)
tree2472f54a41aaa8fd47e27ae1641b57c1f0477141 /sql/hive
parent7a7ce272fe9a703f58b0180a9d2001ecb5c4b8db (diff)
downloadspark-b7277e03d1038e2a19495c0ef7707e2d77937ccf.tar.gz
spark-b7277e03d1038e2a19495c0ef7707e2d77937ccf.tar.bz2
spark-b7277e03d1038e2a19495c0ef7707e2d77937ccf.zip
[SPARK-19495][SQL] Make SQLConf slightly more extensible
## What changes were proposed in this pull request? This pull request makes SQLConf slightly more extensible by removing the visibility limitations on the build* functions. ## How was this patch tested? N/A - there are no logic changes and everything should be covered by existing unit tests. Author: Reynold Xin <rxin@databricks.com> Closes #16835 from rxin/SPARK-19495.
Diffstat (limited to 'sql/hive')
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala18
1 file changed, 9 insertions, 9 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala
index 2822a55e3d..30abc62803 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala
@@ -69,18 +69,18 @@ private[spark] object HiveUtils extends Logging {
*/
val hiveTypeString: String = "HIVE_TYPE_STRING"
- val HIVE_METASTORE_VERSION = SQLConfigBuilder("spark.sql.hive.metastore.version")
+ val HIVE_METASTORE_VERSION = buildConf("spark.sql.hive.metastore.version")
.doc("Version of the Hive metastore. Available options are " +
s"<code>0.12.0</code> through <code>$hiveExecutionVersion</code>.")
.stringConf
.createWithDefault(hiveExecutionVersion)
- val HIVE_EXECUTION_VERSION = SQLConfigBuilder("spark.sql.hive.version")
+ val HIVE_EXECUTION_VERSION = buildConf("spark.sql.hive.version")
.doc("Version of Hive used internally by Spark SQL.")
.stringConf
.createWithDefault(hiveExecutionVersion)
- val HIVE_METASTORE_JARS = SQLConfigBuilder("spark.sql.hive.metastore.jars")
+ val HIVE_METASTORE_JARS = buildConf("spark.sql.hive.metastore.jars")
.doc(s"""
| Location of the jars that should be used to instantiate the HiveMetastoreClient.
| This property can be one of three options: "
@@ -96,28 +96,28 @@ private[spark] object HiveUtils extends Logging {
.stringConf
.createWithDefault("builtin")
- val CONVERT_METASTORE_PARQUET = SQLConfigBuilder("spark.sql.hive.convertMetastoreParquet")
+ val CONVERT_METASTORE_PARQUET = buildConf("spark.sql.hive.convertMetastoreParquet")
.doc("When set to false, Spark SQL will use the Hive SerDe for parquet tables instead of " +
"the built in support.")
.booleanConf
.createWithDefault(true)
val CONVERT_METASTORE_PARQUET_WITH_SCHEMA_MERGING =
- SQLConfigBuilder("spark.sql.hive.convertMetastoreParquet.mergeSchema")
+ buildConf("spark.sql.hive.convertMetastoreParquet.mergeSchema")
.doc("When true, also tries to merge possibly different but compatible Parquet schemas in " +
"different Parquet data files. This configuration is only effective " +
"when \"spark.sql.hive.convertMetastoreParquet\" is true.")
.booleanConf
.createWithDefault(false)
- val CONVERT_METASTORE_ORC = SQLConfigBuilder("spark.sql.hive.convertMetastoreOrc")
+ val CONVERT_METASTORE_ORC = buildConf("spark.sql.hive.convertMetastoreOrc")
.internal()
.doc("When set to false, Spark SQL will use the Hive SerDe for ORC tables instead of " +
"the built in support.")
.booleanConf
.createWithDefault(false)
- val HIVE_METASTORE_SHARED_PREFIXES = SQLConfigBuilder("spark.sql.hive.metastore.sharedPrefixes")
+ val HIVE_METASTORE_SHARED_PREFIXES = buildConf("spark.sql.hive.metastore.sharedPrefixes")
.doc("A comma separated list of class prefixes that should be loaded using the classloader " +
"that is shared between Spark SQL and a specific version of Hive. An example of classes " +
"that should be shared is JDBC drivers that are needed to talk to the metastore. Other " +
@@ -130,7 +130,7 @@ private[spark] object HiveUtils extends Logging {
private def jdbcPrefixes = Seq(
"com.mysql.jdbc", "org.postgresql", "com.microsoft.sqlserver", "oracle.jdbc")
- val HIVE_METASTORE_BARRIER_PREFIXES = SQLConfigBuilder("spark.sql.hive.metastore.barrierPrefixes")
+ val HIVE_METASTORE_BARRIER_PREFIXES = buildConf("spark.sql.hive.metastore.barrierPrefixes")
.doc("A comma separated list of class prefixes that should explicitly be reloaded for each " +
"version of Hive that Spark SQL is communicating with. For example, Hive UDFs that are " +
"declared in a prefix that typically would be shared (i.e. <code>org.apache.spark.*</code>).")
@@ -138,7 +138,7 @@ private[spark] object HiveUtils extends Logging {
.toSequence
.createWithDefault(Nil)
- val HIVE_THRIFT_SERVER_ASYNC = SQLConfigBuilder("spark.sql.hive.thriftServer.async")
+ val HIVE_THRIFT_SERVER_ASYNC = buildConf("spark.sql.hive.thriftServer.async")
.doc("When set to true, Hive Thrift server executes SQL queries in an asynchronous way.")
.booleanConf
.createWithDefault(true)