Diffstat (limited to 'sql/hive-thriftserver/src/test/scala')
-rw-r--r--  sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala   12
-rw-r--r--  sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/UISeleniumSuite.scala          105
2 files changed, 111 insertions, 6 deletions
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index 4cf95e7bdf..1fadea97fd 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -409,24 +409,24 @@ abstract class HiveThriftServer2Test extends FunSuite with BeforeAndAfterAll wit
   private val CLASS_NAME = HiveThriftServer2.getClass.getCanonicalName.stripSuffix("$")
   private val LOG_FILE_MARK = s"starting $CLASS_NAME, logging to "
-  private val startScript = "../../sbin/start-thriftserver.sh".split("/").mkString(File.separator)
-  private val stopScript = "../../sbin/stop-thriftserver.sh".split("/").mkString(File.separator)
+  protected val startScript = "../../sbin/start-thriftserver.sh".split("/").mkString(File.separator)
+  protected val stopScript = "../../sbin/stop-thriftserver.sh".split("/").mkString(File.separator)
   private var listeningPort: Int = _
   protected def serverPort: Int = listeningPort
   protected def user = System.getProperty("user.name")
-  private var warehousePath: File = _
-  private var metastorePath: File = _
-  private def metastoreJdbcUri = s"jdbc:derby:;databaseName=$metastorePath;create=true"
+  protected var warehousePath: File = _
+  protected var metastorePath: File = _
+  protected def metastoreJdbcUri = s"jdbc:derby:;databaseName=$metastorePath;create=true"
   private val pidDir: File = Utils.createTempDir("thriftserver-pid")
   private var logPath: File = _
   private var logTailingProcess: Process = _
   private var diagnosisBuffer: ArrayBuffer[String] = ArrayBuffer.empty[String]
-  private def serverStartCommand(port: Int) = {
+  protected def serverStartCommand(port: Int) = {
     val portConf = if (mode == ServerMode.binary) {
       ConfVars.HIVE_SERVER2_THRIFT_PORT
     } else {
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/UISeleniumSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/UISeleniumSuite.scala
new file mode 100644
index 0000000000..47541015a3
--- /dev/null
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/UISeleniumSuite.scala
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.hive.thriftserver
+
+
+
+import scala.util.Random
+
+import org.openqa.selenium.WebDriver
+import org.openqa.selenium.htmlunit.HtmlUnitDriver
+import org.scalatest.{Matchers, BeforeAndAfterAll}
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.selenium.WebBrowser
+import org.scalatest.time.SpanSugar._
+
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars
+import org.apache.spark.sql.hive.HiveContext
+
+
+class UISeleniumSuite
+  extends HiveThriftJdbcTest
+  with WebBrowser with Matchers with BeforeAndAfterAll {
+
+  implicit var webDriver: WebDriver = _
+  var server: HiveThriftServer2 = _
+  var hc: HiveContext = _
+  val uiPort = 20000 + Random.nextInt(10000)
+  override def mode: ServerMode.Value = ServerMode.binary
+
+  override def beforeAll(): Unit = {
+    webDriver = new HtmlUnitDriver
+    super.beforeAll()
+  }
+
+  override def afterAll(): Unit = {
+    if (webDriver != null) {
+      webDriver.quit()
+    }
+    super.afterAll()
+  }
+
+  override protected def serverStartCommand(port: Int) = {
+    val portConf = if (mode == ServerMode.binary) {
+      ConfVars.HIVE_SERVER2_THRIFT_PORT
+    } else {
+      ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT
+    }
+
+    s"""$startScript
+       | --master local
+       | --hiveconf hive.root.logger=INFO,console
+       | --hiveconf ${ConfVars.METASTORECONNECTURLKEY}=$metastoreJdbcUri
+       | --hiveconf ${ConfVars.METASTOREWAREHOUSE}=$warehousePath
+       | --hiveconf ${ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST}=localhost
+       | --hiveconf ${ConfVars.HIVE_SERVER2_TRANSPORT_MODE}=$mode
+       | --hiveconf $portConf=$port
+       | --driver-class-path ${sys.props("java.class.path")}
+       | --conf spark.ui.enabled=true
+       | --conf spark.ui.port=$uiPort
+     """.stripMargin.split("\\s+").toSeq
+  }
+
+ test("thrift server ui test") {
+ withJdbcStatement(statement =>{
+ val baseURL = s"http://localhost:${uiPort}"
+
+ val queries = Seq(
+ "CREATE TABLE test_map(key INT, value STRING)",
+ s"LOAD DATA LOCAL INPATH '${TestData.smallKv}' OVERWRITE INTO TABLE test_map")
+
+ queries.foreach(statement.execute)
+
+ eventually(timeout(10 seconds), interval(50 milliseconds)) {
+ go to (baseURL)
+ find(cssSelector("""ul li a[href*="ThriftServer"]""")) should not be(None)
+ }
+
+ eventually(timeout(10 seconds), interval(50 milliseconds)) {
+ go to (baseURL + "/ThriftServer")
+ find(id("sessionstat")) should not be(None)
+ find(id("sqlstat")) should not be(None)
+
+ // check whether statements exists
+ queries.foreach { line =>
+ findAll(cssSelector("""ul table tbody tr td""")).map(_.text).toList should contain (line)
+ }
+ }
+ })
+ }
+}
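
The reason the first hunk widens several private members of HiveThriftServer2Test to protected is visible in the new suite: a subclass can now supply its own launch command (here, to enable the web UI on a known port). As a purely illustrative sketch that is not part of this change (the class name and the extra --conf flag are hypothetical), another suite could reuse the same hook without rebuilding the whole command:

// Hypothetical sketch, not part of this commit. It assumes the protected
// serverStartCommand hook introduced above and the existing HiveThriftJdbcTest
// and ServerMode helpers in this package.
class RetainedJobsUISuite extends HiveThriftJdbcTest {
  override def mode: ServerMode.Value = ServerMode.binary

  override protected def serverStartCommand(port: Int) = {
    // Start from the default launch command and append one extra Spark conf (illustrative).
    super.serverStartCommand(port) ++ Seq("--conf", "spark.ui.retainedJobs=10")
  }
}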