diff options
author | gatorsmile <gatorsmile@gmail.com> | 2016-05-21 20:07:34 -0700 |
---|---|---|
committer | Reynold Xin <rxin@databricks.com> | 2016-05-21 20:07:34 -0700 |
commit | 8f0a3d5bcba313dc3b70d4aa9a8ba2aa2d276062 (patch) | |
tree | f35feb55bf0c1fe384f0cdf0882fc9648f7eb513 /sql/hive-thriftserver | |
parent | c18fa464f404ed2612f8c4d355cb0544b355975b (diff) | |
download | spark-8f0a3d5bcba313dc3b70d4aa9a8ba2aa2d276062.tar.gz spark-8f0a3d5bcba313dc3b70d4aa9a8ba2aa2d276062.tar.bz2 spark-8f0a3d5bcba313dc3b70d4aa9a8ba2aa2d276062.zip |
[SPARK-15330][SQL] Implement Reset Command
#### What changes were proposed in this pull request?
Like the `Set` command, the `Reset` command is also supported by Hive. See the link: https://cwiki.apache.org/confluence/display/Hive/LanguageManual+Cli
Below is the related Hive JIRA: https://issues.apache.org/jira/browse/HIVE-3202
This PR is to implement such a command for resetting the SQL-related configuration to the default values. One of the use cases shown in HIVE-3202 is listed below:
> For the purpose of optimization we set various configs per query. It's worthy but all those configs should be reset every time for next query.
#### How was this patch tested?
Added a test case.
Author: gatorsmile <gatorsmile@gmail.com>
Author: xiaoli <lixiao1983@gmail.com>
Author: Xiao Li <xiaoli@Xiaos-MacBook-Pro.local>
Closes #13121 from gatorsmile/resetCommand.
Diffstat (limited to 'sql/hive-thriftserver')
-rw-r--r-- | sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala | 6 |
1 file changed, 3 insertions, 3 deletions
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala index 1402e0a687..33ff8aee79 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala @@ -32,8 +32,8 @@ import org.apache.hadoop.hive.common.{HiveInterruptCallback, HiveInterruptUtils} import org.apache.hadoop.hive.conf.HiveConf import org.apache.hadoop.hive.ql.Driver import org.apache.hadoop.hive.ql.exec.Utilities -import org.apache.hadoop.hive.ql.processors.{AddResourceProcessor, CommandProcessor, - CommandProcessorFactory, SetProcessor} +import org.apache.hadoop.hive.ql.processors.{AddResourceProcessor, CommandProcessor} +import org.apache.hadoop.hive.ql.processors.{CommandProcessorFactory, ResetProcessor, SetProcessor} import org.apache.hadoop.hive.ql.session.SessionState import org.apache.thrift.transport.TSocket @@ -312,7 +312,7 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging { if (proc != null) { // scalastyle:off println if (proc.isInstanceOf[Driver] || proc.isInstanceOf[SetProcessor] || - proc.isInstanceOf[AddResourceProcessor]) { + proc.isInstanceOf[AddResourceProcessor] || proc.isInstanceOf[ResetProcessor]) { val driver = new SparkSQLDriver driver.init() |