aboutsummaryrefslogtreecommitdiff
path: root/sql/core/src/test
diff options
context:
space:
mode:
authorDongjoon Hyun <dongjoon@apache.org>2017-01-23 01:21:44 -0800
committergatorsmile <gatorsmile@gmail.com>2017-01-23 01:21:44 -0800
commitc4a6519c44f29950ef3d706a4f79e006ec8bc6b5 (patch)
tree3c4e0f91fef01902d2e7265eb5f27104d93e92f4 /sql/core/src/test
parentf067acefabebf04939d03a639a2aaa654e1bc8f9 (diff)
downloadspark-c4a6519c44f29950ef3d706a4f79e006ec8bc6b5.tar.gz
spark-c4a6519c44f29950ef3d706a4f79e006ec8bc6b5.tar.bz2
spark-c4a6519c44f29950ef3d706a4f79e006ec8bc6b5.zip
[SPARK-19218][SQL] Fix SET command to show a result correctly and in a sorted order
## What changes were proposed in this pull request? This PR aims to fix the following two things. 1. `sql("SET -v").collect()` or `sql("SET -v").show()` raises the following exceptions for String configuration with default value, `null`. For the test, please see [Jenkins result](https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/71539/testReport/) and https://github.com/apache/spark/commit/60953bf1f1ba144e709fdae3903a390ff9479fd0 in #16624 . ``` sbt.ForkMain$ForkError: java.lang.RuntimeException: Error while decoding: java.lang.NullPointerException createexternalrow(input[0, string, false].toString, input[1, string, false].toString, input[2, string, false].toString, StructField(key,StringType,false), StructField(value,StringType,false), StructField(meaning,StringType,false)) :- input[0, string, false].toString : +- input[0, string, false] :- input[1, string, false].toString : +- input[1, string, false] +- input[2, string, false].toString +- input[2, string, false] ``` 2. Currently, `SET` and `SET -v` commands show unsorted result. We had better show a sorted result for UX. Also, this is compatible with Hive. **BEFORE** ``` scala> sql("set").show(false) ... |spark.driver.host |10.22.16.140 | |spark.driver.port |63893 | |spark.repl.class.uri |spark://10.22.16.140:63893/classes | ... |spark.app.name |Spark shell | |spark.driver.memory |4G | |spark.executor.id |driver | |spark.submit.deployMode |client | |spark.master |local[*] | |spark.home |/Users/dhyun/spark | |spark.sql.catalogImplementation|hive | |spark.app.id |local-1484333618945 | ``` **AFTER** ``` scala> sql("set").show(false) ... 
|spark.app.id |local-1484333925649 | |spark.app.name |Spark shell | |spark.driver.host |10.22.16.140 | |spark.driver.memory |4G | |spark.driver.port |64994 | |spark.executor.id |driver | |spark.jars | | |spark.master |local[*] | |spark.repl.class.uri |spark://10.22.16.140:64994/classes | |spark.sql.catalogImplementation|hive | |spark.submit.deployMode |client | ``` ## How was this patch tested? Jenkins with a new test case. Author: Dongjoon Hyun <dongjoon@apache.org> Closes #16579 from dongjoon-hyun/SPARK-19218.
Diffstat (limited to 'sql/core/src/test')
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala27
1 file changed, 27 insertions, 0 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 8f1beaa3a1..07b787a191 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -982,6 +982,33 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
spark.sessionState.conf.clear()
}
+ test("SPARK-19218 SET command should show a result in a sorted order") {
+ val overrideConfs = sql("SET").collect()
+ sql(s"SET test.key3=1")
+ sql(s"SET test.key2=2")
+ sql(s"SET test.key1=3")
+ val result = sql("SET").collect()
+ assert(result ===
+ (overrideConfs ++ Seq(
+ Row("test.key1", "3"),
+ Row("test.key2", "2"),
+ Row("test.key3", "1"))).sortBy(_.getString(0))
+ )
+ spark.sessionState.conf.clear()
+ }
+
+ test("SPARK-19218 `SET -v` should not fail with null value configuration") {
+ import SQLConf._
+ val confEntry = SQLConfigBuilder("spark.test").doc("doc").stringConf.createWithDefault(null)
+
+ try {
+ val result = sql("SET -v").collect()
+ assert(result === result.sortBy(_.getString(0)))
+ } finally {
+ SQLConf.unregister(confEntry)
+ }
+ }
+
test("SET commands with illegal or inappropriate argument") {
spark.sessionState.conf.clear()
// Set negative mapred.reduce.tasks for automatically determining