path: root/sql/hive-thriftserver
author    Sean Owen <sowen@cloudera.com>  2015-08-25 12:33:13 +0100
committer Sean Owen <sowen@cloudera.com>  2015-08-25 12:33:13 +0100
commit    69c9c177160e32a2fbc9b36ecc52156077fca6fc (patch)
tree      57345aaf19c3149038bfca5c4ddccf33d41bdd5b /sql/hive-thriftserver
parent    7f1e507bf7e82bff323c5dec3c1ee044687c4173 (diff)
[SPARK-9613] [CORE] Ban use of JavaConversions and migrate all existing uses to JavaConverters
Replace `JavaConversions` implicits with `JavaConverters`. Most occurrences I've seen so far are necessary conversions; a few have been avoidable. None are in critical code as far as I see, yet.

Author: Sean Owen <sowen@cloudera.com>

Closes #8033 from srowen/SPARK-9613.
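For readers unfamiliar with the two APIs, a minimal sketch (not part of this patch; names are illustrative) of what the migration looks like. `JavaConversions` inserted Java/Scala collection conversions implicitly at the call site; `JavaConverters` requires an explicit `.asScala`/`.asJava` at every boundary crossing:

import java.util.{ArrayList => JArrayList}

import scala.collection.JavaConverters._

object ConvertersSketch {
  def main(args: Array[String]): Unit = {
    val jList = new JArrayList[String]()
    jList.add("spark")

    // With JavaConversions._, jList could be used as a Scala collection
    // directly via an implicit view. With JavaConverters._ every crossing
    // of the Java/Scala boundary is a visible call:
    val asScalaSeq: Seq[String] = jList.asScala.toSeq            // Java -> Scala
    val asJavaList: java.util.List[String] = asScalaSeq.asJava   // Scala -> Java

    println(asScalaSeq.mkString(","))
    println(asJavaList.size())
  }
}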
Diffstat (limited to 'sql/hive-thriftserver')
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala | 10
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala              | 16
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala             |  6
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala                 | 14
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala                    |  4
5 files changed, 25 insertions, 25 deletions
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
index 02cc7e5efa..306f98bcb5 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
@@ -20,9 +20,9 @@ package org.apache.spark.sql.hive.thriftserver
import java.security.PrivilegedExceptionAction
import java.sql.{Date, Timestamp}
import java.util.concurrent.RejectedExecutionException
-import java.util.{Map => JMap, UUID}
+import java.util.{Arrays, Map => JMap, UUID}
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, Map => SMap}
import scala.util.control.NonFatal
@@ -126,13 +126,13 @@ private[hive] class SparkExecuteStatementOperation(
def getResultSetSchema: TableSchema = {
if (result == null || result.queryExecution.analyzed.output.size == 0) {
- new TableSchema(new FieldSchema("Result", "string", "") :: Nil)
+ new TableSchema(Arrays.asList(new FieldSchema("Result", "string", "")))
} else {
logInfo(s"Result Schema: ${result.queryExecution.analyzed.output}")
val schema = result.queryExecution.analyzed.output.map { attr =>
new FieldSchema(attr.name, HiveMetastoreTypes.toMetastoreType(attr.dataType), "")
}
- new TableSchema(schema)
+ new TableSchema(schema.asJava)
}
}
@@ -298,7 +298,7 @@ private[hive] class SparkExecuteStatementOperation(
sqlOperationConf = new HiveConf(sqlOperationConf)
// apply overlay query specific settings, if any
- getConfOverlay().foreach { case (k, v) =>
+ getConfOverlay().asScala.foreach { case (k, v) =>
try {
sqlOperationConf.verifyAndSet(k, v)
} catch {
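Two idioms change in this file: a Scala `List` literal passed to a Java constructor (now `Arrays.asList` or `.asJava`), and iteration over a `java.util.Map` (now through `.asScala`). A hedged sketch of both, with plain JDK types standing in for Hive's `FieldSchema`/`TableSchema`:

import java.util.Arrays

import scala.collection.JavaConverters._

object SchemaSketch {
  def main(args: Array[String]): Unit = {
    // Scala List -> java.util.List, previously an implicit view:
    val one: java.util.List[String] = Arrays.asList("Result")
    val many: java.util.List[String] = List("a", "b").map(_.toUpperCase).asJava

    // java.util.Map entries as Scala (k, v) pairs, previously implicit:
    val overlay = new java.util.HashMap[String, String]()
    overlay.put("spark.sql.shuffle.partitions", "4")
    overlay.asScala.foreach { case (k, v) => println(s"$k=$v") }

    println(one.size() + many.size())
  }
}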
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index 7799704c81..a29df56798 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -17,11 +17,11 @@
package org.apache.spark.sql.hive.thriftserver
-import scala.collection.JavaConversions._
-
import java.io._
import java.util.{ArrayList => JArrayList, Locale}
+import scala.collection.JavaConverters._
+
import jline.console.ConsoleReader
import jline.console.history.FileHistory
@@ -101,9 +101,9 @@ private[hive] object SparkSQLCLIDriver extends Logging {
// Set all properties specified via command line.
val conf: HiveConf = sessionState.getConf
- sessionState.cmdProperties.entrySet().foreach { item =>
- val key = item.getKey.asInstanceOf[String]
- val value = item.getValue.asInstanceOf[String]
+ sessionState.cmdProperties.entrySet().asScala.foreach { item =>
+ val key = item.getKey.toString
+ val value = item.getValue.toString
// We do not propagate metastore options to the execution copy of hive.
if (key != "javax.jdo.option.ConnectionURL") {
conf.set(key, value)
@@ -316,15 +316,15 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_HEADER)) {
// Print the column names.
- Option(driver.getSchema.getFieldSchemas).map { fields =>
- out.println(fields.map(_.getName).mkString("\t"))
+ Option(driver.getSchema.getFieldSchemas).foreach { fields =>
+ out.println(fields.asScala.map(_.getName).mkString("\t"))
}
}
var counter = 0
try {
while (!out.checkError() && driver.getResults(res)) {
- res.foreach{ l =>
+ res.asScala.foreach { l =>
counter += 1
out.println(l)
}
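Beyond the mechanical `.asScala` additions, two small fixes ride along in this file: entry-set values are read with `toString` rather than an unchecked `asInstanceOf[String]`, and the side-effecting `Option(...).map` becomes `Option(...).foreach`. A sketch of both, assuming nothing beyond the JDK:

import scala.collection.JavaConverters._

object CliSketch {
  def main(args: Array[String]): Unit = {
    val props = new java.util.Properties()
    props.setProperty("hive.cli.print.header", "true")

    // Properties entries are (AnyRef, AnyRef); toString avoids a cast
    // that would throw on a non-String value.
    props.entrySet().asScala.foreach { item =>
      println(s"${item.getKey.toString}=${item.getValue.toString}")
    }

    // For side effects, foreach states the intent; map would build and
    // discard an Option[Unit].
    val fields: Option[Seq[String]] = Some(Seq("id", "name"))
    fields.foreach(fs => println(fs.mkString("\t")))
  }
}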
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala
index 644165acf7..5ad8c54f29 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala
@@ -21,6 +21,8 @@ import java.io.IOException
import java.util.{List => JList}
import javax.security.auth.login.LoginException
+import scala.collection.JavaConverters._
+
import org.apache.commons.logging.Log
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.shims.Utils
@@ -34,8 +36,6 @@ import org.apache.hive.service.{AbstractService, Service, ServiceException}
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.hive.thriftserver.ReflectionUtils._
-import scala.collection.JavaConversions._
-
private[hive] class SparkSQLCLIService(hiveServer: HiveServer2, hiveContext: HiveContext)
extends CLIService(hiveServer)
with ReflectedCompositeService {
@@ -76,7 +76,7 @@ private[thriftserver] trait ReflectedCompositeService { this: AbstractService =>
def initCompositeService(hiveConf: HiveConf) {
// Emulating `CompositeService.init(hiveConf)`
val serviceList = getAncestorField[JList[Service]](this, 2, "serviceList")
- serviceList.foreach(_.init(hiveConf))
+ serviceList.asScala.foreach(_.init(hiveConf))
// Emulating `AbstractService.init(hiveConf)`
invoke(classOf[AbstractService], this, "ensureCurrentState", classOf[STATE] -> STATE.NOTINITED)
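The one behavioral line here is the iteration over the reflectively obtained `serviceList`; the rest relocates the import to the conventional position (scala.* imports after java.*, before third-party). A sketch of the iteration pattern, with a hypothetical `Service` trait in place of Hive's:

import java.util.{ArrayList => JArrayList, List => JList}

import scala.collection.JavaConverters._

object ServiceSketch {
  trait Service { def init(conf: String): Unit }

  def main(args: Array[String]): Unit = {
    val serviceList: JList[Service] = new JArrayList[Service]()
    serviceList.add(new Service {
      def init(conf: String): Unit = println(s"init with $conf")
    })
    // One explicit wrapper call instead of a file-wide implicit import:
    serviceList.asScala.foreach(_.init("hive-site"))
  }
}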
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
index 77272aecf2..2619286afc 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
@@ -17,7 +17,9 @@
package org.apache.spark.sql.hive.thriftserver
-import java.util.{ArrayList => JArrayList, List => JList}
+import java.util.{Arrays, ArrayList => JArrayList, List => JList}
+
+import scala.collection.JavaConverters._
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema}
@@ -27,8 +29,6 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse
import org.apache.spark.Logging
import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes}
-import scala.collection.JavaConversions._
-
private[hive] class SparkSQLDriver(
val context: HiveContext = SparkSQLEnv.hiveContext)
extends Driver
@@ -43,14 +43,14 @@ private[hive] class SparkSQLDriver(
private def getResultSetSchema(query: context.QueryExecution): Schema = {
val analyzed = query.analyzed
logDebug(s"Result Schema: ${analyzed.output}")
- if (analyzed.output.size == 0) {
- new Schema(new FieldSchema("Response code", "string", "") :: Nil, null)
+ if (analyzed.output.isEmpty) {
+ new Schema(Arrays.asList(new FieldSchema("Response code", "string", "")), null)
} else {
val fieldSchemas = analyzed.output.map { attr =>
new FieldSchema(attr.name, HiveMetastoreTypes.toMetastoreType(attr.dataType), "")
}
- new Schema(fieldSchemas, null)
+ new Schema(fieldSchemas.asJava, null)
}
}
@@ -79,7 +79,7 @@ private[hive] class SparkSQLDriver(
if (hiveResponse == null) {
false
} else {
- res.asInstanceOf[JArrayList[String]].addAll(hiveResponse)
+ res.asInstanceOf[JArrayList[String]].addAll(hiveResponse.asJava)
hiveResponse = null
true
}
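As in SparkExecuteStatementOperation, the `Schema` construction gains `Arrays.asList`/`.asJava`, `size == 0` becomes the clearer `isEmpty`, and `addAll` now receives an explicit Java view. A compilable sketch of the `addAll` change with plain types:

import java.util.{ArrayList => JArrayList}

import scala.collection.JavaConverters._

object DriverSketch {
  def main(args: Array[String]): Unit = {
    val hiveResponse: Seq[String] = Seq("row1", "row2")
    val res = new JArrayList[String]()
    // addAll takes a java.util.Collection; asJava makes the bridge visible.
    res.addAll(hiveResponse.asJava)
    assert(res.size() == 2)
    println(res)
  }
}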
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
index 1d41c46131..bacf6cc458 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.thriftserver
import java.io.PrintStream
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import org.apache.spark.scheduler.StatsReportListener
import org.apache.spark.sql.hive.HiveContext
@@ -64,7 +64,7 @@ private[hive] object SparkSQLEnv extends Logging {
hiveContext.setConf("spark.sql.hive.version", HiveContext.hiveExecutionVersion)
if (log.isDebugEnabled) {
- hiveContext.hiveconf.getAllProperties.toSeq.sorted.foreach { case (k, v) =>
+ hiveContext.hiveconf.getAllProperties.asScala.toSeq.sorted.foreach { case (k, v) =>
logDebug(s"HiveConf var: $k=$v")
}
}
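The last hunk leans on JavaConverters' Properties support: `.asScala` views a `java.util.Properties` as a `mutable.Map[String, String]`, after which `toSeq.sorted` gives the deterministic ordering the debug log wants. A sketch under that assumption:

import scala.collection.JavaConverters._

object EnvSketch {
  def main(args: Array[String]): Unit = {
    val props = new java.util.Properties()
    props.setProperty("b.key", "2")
    props.setProperty("a.key", "1")
    // Properties -> mutable.Map[String, String]; sorted pairs for stable logs.
    props.asScala.toSeq.sorted.foreach { case (k, v) =>
      println(s"HiveConf var: $k=$v")
    }
  }
}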