author     gatorsmile <gatorsmile@gmail.com>  2015-12-28 17:22:18 -0800
committer  Reynold Xin <rxin@databricks.com>  2015-12-28 17:22:18 -0800
commit     043135819c487abe9657c11006ce468a6e1f262e (patch)
tree       2c773572bd31fce438bc2f548cb5806c10f96dae
parent     124a3a5e4eece3aabca44fbdd2f8c4c086d6eec3 (diff)
[SPARK-12522][SQL][MINOR] Add the missing document strings for the SQL configuration
Fixing the missing documentation for several SQL configurations. The placeholder "TODO" shows up as the description of the following keys when issuing the command "SET -v":

```
spark.sql.columnNameOfCorruptRecord
spark.sql.hive.verifyPartitionPath
spark.sql.sources.parallelPartitionDiscovery.threshold
spark.sql.hive.convertMetastoreParquet.mergeSchema
spark.sql.hive.convertCTAS
spark.sql.hive.thriftServer.async
```

Author: gatorsmile <gatorsmile@gmail.com>

Closes #10471 from gatorsmile/commandDesc.
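For context, `SET -v` is the command that surfaces these doc strings. A minimal sketch against the Spark 1.6-era API, assuming an existing `SQLContext` named `sqlContext` (as provided by spark-shell):

```scala
// List every defined SQL configuration key along with its current value
// and its doc string; before this patch, the undocumented keys above
// printed a "<TODO>" placeholder in the doc column.
sqlContext.sql("SET -v").show(100, false)
```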
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala             8
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala  2
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala    9
3 files changed, 11 insertions(+), 8 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
index 3d81926285..b58a373991 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
@@ -334,7 +334,8 @@ private[spark] object SQLConf {
val HIVE_VERIFY_PARTITION_PATH = booleanConf("spark.sql.hive.verifyPartitionPath",
defaultValue = Some(false),
- doc = "<TODO>")
+ doc = "When true, check all the partition paths under the table\'s root directory " +
+ "when reading data stored in HDFS.")
val HIVE_METASTORE_PARTITION_PRUNING = booleanConf("spark.sql.hive.metastorePartitionPruning",
defaultValue = Some(false),
@@ -352,7 +353,7 @@ private[spark] object SQLConf {
val COLUMN_NAME_OF_CORRUPT_RECORD = stringConf("spark.sql.columnNameOfCorruptRecord",
defaultValue = Some("_corrupt_record"),
- doc = "<TODO>")
+ doc = "The name of internal column for storing raw/un-parsed JSON records that fail to parse.")
val BROADCAST_TIMEOUT = intConf("spark.sql.broadcastTimeout",
defaultValue = Some(5 * 60),
@@ -413,7 +414,8 @@ private[spark] object SQLConf {
val PARALLEL_PARTITION_DISCOVERY_THRESHOLD = intConf(
key = "spark.sql.sources.parallelPartitionDiscovery.threshold",
defaultValue = Some(32),
- doc = "<TODO>")
+ doc = "The degree of parallelism for schema merging and partition discovery of " +
+ "Parquet data sources.")
// Whether to perform eager analysis when constructing a dataframe.
// Set to false when debugging requires the ability to look at invalid query plans.
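The SQLConf entries above are ordinary key/value settings, so the newly documented behavior can be exercised directly. A hedged sketch (Spark 1.6-era API, same `sqlContext` assumption as above):

```scala
// Tune the degree of parallelism used for schema merging and partition
// discovery of Parquet data sources (per the doc string added above).
sqlContext.setConf("spark.sql.sources.parallelPartitionDiscovery.threshold", "64")

// Reading a key back falls through to its registered default when it
// was never set explicitly.
val corruptCol = sqlContext.getConf("spark.sql.columnNameOfCorruptRecord")
println(corruptCol) // prints "_corrupt_record" unless overridden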
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
index e2dc13d66c..6ec4cadeeb 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
@@ -148,8 +148,6 @@ case class SetCommand(kv: Option[(String, Option[String])]) extends RunnableComm
}
(keyValueOutput, runFunc)
- (keyValueOutput, runFunc)
-
case Some((SQLConf.Deprecated.SORTMERGE_JOIN, Some(value))) =>
val runFunc = (sqlContext: SQLContext) => {
logWarning(
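Aside from the doc strings, the commands.scala hunk above drops a duplicated result expression. A Scala block evaluates to its last expression, so the first copy was dead code that the compiler silently discards; a minimal standalone illustration (not the Spark code itself):

```scala
val result = {
  val keyValueOutput = Seq("key", "value")
  val runFunc = (n: Int) => n + 1
  (keyValueOutput, runFunc) // dead: evaluated, then discarded
  (keyValueOutput, runFunc) // the block's value; deleting the line above changes nothing
}
```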
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 0eeb62ca2c..384ea211df 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -692,11 +692,14 @@ private[hive] object HiveContext {
val CONVERT_METASTORE_PARQUET_WITH_SCHEMA_MERGING = booleanConf(
"spark.sql.hive.convertMetastoreParquet.mergeSchema",
defaultValue = Some(false),
- doc = "TODO")
+ doc = "When true, also tries to merge possibly different but compatible Parquet schemas in " +
+ "different Parquet data files. This configuration is only effective " +
+ "when \"spark.sql.hive.convertMetastoreParquet\" is true.")
val CONVERT_CTAS = booleanConf("spark.sql.hive.convertCTAS",
defaultValue = Some(false),
- doc = "TODO")
+ doc = "When true, a table created by a Hive CTAS statement (no USING clause) will be " +
+ "converted to a data source table, using the data source set by spark.sql.sources.default.")
val HIVE_METASTORE_SHARED_PREFIXES = stringSeqConf("spark.sql.hive.metastore.sharedPrefixes",
defaultValue = Some(jdbcPrefixes),
@@ -717,7 +720,7 @@ private[hive] object HiveContext {
val HIVE_THRIFT_SERVER_ASYNC = booleanConf("spark.sql.hive.thriftServer.async",
defaultValue = Some(true),
- doc = "TODO")
+ doc = "When set to true, Hive Thrift server executes SQL queries in an asynchronous way.")
/** Constructs a configuration for hive, where the metastore is located in a temp directory. */
def newTemporaryConfiguration(useInMemoryDerby: Boolean): Map[String, String] = {
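Finally, the Hive-side switches documented above in action; a hedged sketch assuming a Spark 1.6-era `HiveContext` named `hiveContext` (the one spark-shell creates when Spark is built with Hive support):

```scala
// Merge compatible but different Parquet schemas across data files when
// converting metastore Parquet tables; only effective together with
// spark.sql.hive.convertMetastoreParquet.
hiveContext.setConf("spark.sql.hive.convertMetastoreParquet.mergeSchema", "true")

// Convert Hive CTAS statements without a USING clause into data source
// tables backed by spark.sql.sources.default.
hiveContext.setConf("spark.sql.hive.convertCTAS", "true")
```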