about summary refs log tree commit diff
diff options
context:
space:
mode:
authorJin Adachi <adachij2002@yahoo.co.jp>2015-04-16 23:41:04 +0800
committerCheng Lian <lian@databricks.com>2015-04-16 23:41:04 +0800
commit3ae37b93a7c299bd8b22a36248035bca5de3422f (patch)
tree7774e3bf5cd86bf7905d4d4d1ac6c42da61de4f1
parentde4fa6b6d12e2bee0307ffba2abfca0c33f15e45 (diff)
downloadspark-3ae37b93a7c299bd8b22a36248035bca5de3422f.tar.gz
spark-3ae37b93a7c299bd8b22a36248035bca5de3422f.tar.bz2
spark-3ae37b93a7c299bd8b22a36248035bca5de3422f.zip
[SPARK-6694][SQL] SparkSQL CLI must be able to specify an option --database on the command line.
SparkSQL CLI has an option --database as follows. But, the option --database is ignored. ``` $ spark-sql --help : CLI options: : --database <databasename> Specify the database to use ``` Author: Jin Adachi <adachij2002@yahoo.co.jp> Author: adachij <adachij@nttdata.co.jp> Closes #5345 from adachij2002/SPARK-6694 and squashes the following commits: 8659084 [Jin Adachi] Merge branch 'master' of https://github.com/apache/spark into SPARK-6694 0301eb9 [Jin Adachi] Merge branch 'master' of https://github.com/apache/spark into SPARK-6694 df81086 [Jin Adachi] Modify code style. 846f83e [Jin Adachi] Merge branch 'master' of https://github.com/apache/spark into SPARK-6694 dbe8c63 [Jin Adachi] Change file permission to 644. 7b58f42 [Jin Adachi] Merge branch 'master' of https://github.com/apache/spark into SPARK-6694 c581d06 [Jin Adachi] Add an option --database test db56122 [Jin Adachi] Merge branch 'SPARK-6694' of https://github.com/adachij2002/spark into SPARK-6694 ee09fa5 [adachij] Merge branch 'master' into SPARK-6694 c804c03 [adachij] SparkSQL CLI must be able to specify an option --database on the command line.
-rw-r--r--sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala3
-rw-r--r--sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala45
2 files changed, 39 insertions, 9 deletions
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index 62c061bef6..85281c6d73 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -145,6 +145,9 @@ private[hive] object SparkSQLCLIDriver {
case e: UnsupportedEncodingException => System.exit(3)
}
+ // use the specified database if specified
+ cli.processSelectDatabase(sessionState);
+
// Execute -i init files (always in silent mode)
cli.processInitFiles(sessionState)
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index 6d1d7c3a4e..b070fa8eaa 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -25,22 +25,31 @@ import scala.concurrent.{Await, Promise}
import scala.sys.process.{Process, ProcessLogger}
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite}
import org.apache.spark.Logging
import org.apache.spark.util.Utils
-class CliSuite extends FunSuite with BeforeAndAfterAll with Logging {
+class CliSuite extends FunSuite with BeforeAndAfter with Logging {
+ val warehousePath = Utils.createTempDir()
+ val metastorePath = Utils.createTempDir()
+
+ before {
+ warehousePath.delete()
+ metastorePath.delete()
+ }
+
+ after {
+ warehousePath.delete()
+ metastorePath.delete()
+ }
+
def runCliWithin(
timeout: FiniteDuration,
extraArgs: Seq[String] = Seq.empty)(
- queriesAndExpectedAnswers: (String, String)*) {
+ queriesAndExpectedAnswers: (String, String)*): Unit = {
val (queries, expectedAnswers) = queriesAndExpectedAnswers.unzip
- val warehousePath = Utils.createTempDir()
- warehousePath.delete()
- val metastorePath = Utils.createTempDir()
- metastorePath.delete()
val cliScript = "../../bin/spark-sql".split("/").mkString(File.separator)
val command = {
@@ -95,8 +104,6 @@ class CliSuite extends FunSuite with BeforeAndAfterAll with Logging {
""".stripMargin, cause)
throw cause
} finally {
- warehousePath.delete()
- metastorePath.delete()
process.destroy()
}
}
@@ -124,4 +131,24 @@ class CliSuite extends FunSuite with BeforeAndAfterAll with Logging {
test("Single command with -e") {
runCliWithin(1.minute, Seq("-e", "SHOW DATABASES;"))("" -> "OK")
}
+
+ test("Single command with --database") {
+ runCliWithin(1.minute)(
+ "CREATE DATABASE hive_test_db;"
+ -> "OK",
+ "USE hive_test_db;"
+ -> "OK",
+ "CREATE TABLE hive_test(key INT, val STRING);"
+ -> "OK",
+ "SHOW TABLES;"
+ -> "Time taken: "
+ )
+
+ runCliWithin(1.minute, Seq("--database", "hive_test_db", "-e", "SHOW TABLES;"))(
+ ""
+ -> "OK",
+ ""
+ -> "hive_test"
+ )
+ }
}