author     Sean Owen <sowen@cloudera.com>  2015-04-27 19:50:55 -0400
committer  Sean Owen <sowen@cloudera.com>  2015-04-27 19:50:55 -0400
commit     ab5adb7a973eec9d95c7575c864cba9f8d83a0fd (patch)
tree       ae05e0003efe275028382b16a842660f6e50e43c /sql
parent     5d45e1f60059e2f2fc8ad64778b9ddcc8887c570 (diff)
[SPARK-7145] [CORE] commons-lang (2.x) classes used instead of commons-lang3 (3.x); commons-io used without dependency
Remove use of commons-lang in favor of commons-lang3 classes; remove commons-io use in favor of Guava.

Author: Sean Owen <sowen@cloudera.com>

Closes #5703 from srowen/SPARK-7145 and squashes the following commits:

21fbe03 [Sean Owen] Remove use of commons-lang in favor of commons-lang3 classes; remove commons-io use in favor of Guava
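Only the commons-lang3 half of the change touches sql/; the commons-io-to-Guava swap happens elsewhere in this commit. As a hedged sketch of that swap (the object name, file path, and contents below are hypothetical, not taken from the patch), Guava's Files covers the common read/write helpers that commons-io's FileUtils provided:

    import java.io.File
    import com.google.common.base.Charsets
    import com.google.common.io.Files

    object IoSwapSketch {
      def main(args: Array[String]): Unit = {
        val f = new File("/tmp/sketch.txt") // hypothetical path
        // Guava's Files.write/Files.toString stand in for commons-io's
        // FileUtils.writeStringToFile/FileUtils.readFileToString.
        Files.write("hello", f, Charsets.UTF_8)
        println(Files.toString(f, Charsets.UTF_8)) // prints: hello
      }
    }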
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala                                          | 6
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/AbstractSparkSQLDriver.scala | 4
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala      | 8
-rw-r--r--  sql/hive/src/test/java/org/apache/spark/sql/hive/execution/UDFListString.java                            | 6
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala                        | 9
5 files changed, 17 insertions(+), 16 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
index f326510042..f3b5455574 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
@@ -20,7 +20,8 @@ package org.apache.spark.sql.jdbc
import java.sql.{Connection, DriverManager, ResultSet, ResultSetMetaData, SQLException}
import java.util.Properties
-import org.apache.commons.lang.StringEscapeUtils.escapeSql
+import org.apache.commons.lang3.StringUtils
+
import org.apache.spark.{Logging, Partition, SparkContext, TaskContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.expressions.{Row, SpecificMutableRow}
@@ -239,6 +240,9 @@ private[sql] class JDBCRDD(
case _ => value
}
+ private def escapeSql(value: String): String =
+ if (value == null) null else StringUtils.replace(value, "'", "''")
+
/**
* Turns a single Filter into a String representing a SQL expression.
* Returns null for an unhandled filter.
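commons-lang3 dropped StringEscapeUtils.escapeSql, so the patch inlines its behavior, doubling single quotes, as a private helper. A minimal runnable sketch of that helper (the wrapper object and sample inputs are hypothetical):

    import org.apache.commons.lang3.StringUtils

    object EscapeSqlSketch {
      // Same logic as the private helper added to JDBCRDD above:
      // double any single quotes; pass null through unchanged.
      private def escapeSql(value: String): String =
        if (value == null) null else StringUtils.replace(value, "'", "''")

      def main(args: Array[String]): Unit = {
        println(escapeSql("O'Brien")) // prints: O''Brien
        println(escapeSql(null))      // prints: null
      }
    }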
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/AbstractSparkSQLDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/AbstractSparkSQLDriver.scala
index 59f3a75768..48ac9062af 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/AbstractSparkSQLDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/AbstractSparkSQLDriver.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.thriftserver
import scala.collection.JavaConversions._
-import org.apache.commons.lang.exception.ExceptionUtils
+import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse
@@ -61,7 +61,7 @@ private[hive] abstract class AbstractSparkSQLDriver(
} catch {
case cause: Throwable =>
logError(s"Failed in [$command]", cause)
- new CommandProcessorResponse(1, ExceptionUtils.getFullStackTrace(cause), null)
+ new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(cause), null)
}
}
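commons-lang3 removed getFullStackTrace; its getStackTrace renders the trace via Throwable.printStackTrace, which already includes the "Caused by" chain, so the one-line rename preserves the output. A minimal sketch (the wrapper object and sample exception are hypothetical):

    import org.apache.commons.lang3.exception.ExceptionUtils

    object StackTraceSketch {
      def main(args: Array[String]): Unit = {
        val cause = new RuntimeException("outer",
          new IllegalStateException("inner"))
        // getStackTrace formats the trace as printStackTrace would, so
        // the nested cause appears, matching 2.x's getFullStackTrace.
        println(ExceptionUtils.getStackTrace(cause))
      }
    }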
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index 7e307bb4ad..b7b6925aa8 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -24,18 +24,16 @@ import java.util.{ArrayList => JArrayList}
import jline.{ConsoleReader, History}
-import org.apache.commons.lang.StringUtils
+import org.apache.commons.lang3.StringUtils
import org.apache.commons.logging.LogFactory
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.cli.{CliDriver, CliSessionState, OptionsProcessor}
-import org.apache.hadoop.hive.common.LogUtils.LogInitializationException
-import org.apache.hadoop.hive.common.{HiveInterruptCallback, HiveInterruptUtils, LogUtils}
+import org.apache.hadoop.hive.common.{HiveInterruptCallback, HiveInterruptUtils}
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.exec.Utilities
-import org.apache.hadoop.hive.ql.processors.{AddResourceProcessor, SetProcessor, CommandProcessor, CommandProcessorFactory}
+import org.apache.hadoop.hive.ql.processors.{AddResourceProcessor, SetProcessor, CommandProcessor}
import org.apache.hadoop.hive.ql.session.SessionState
-import org.apache.hadoop.hive.shims.ShimLoader
import org.apache.thrift.transport.TSocket
import org.apache.spark.Logging
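This file needs no code changes beyond imports: StringUtils moves to the lang3 package, and unused Hive imports (LogUtils with its LogInitializationException, CommandProcessorFactory, ShimLoader) are dropped. For the common static helpers, lang3's StringUtils keeps the 2.x method names and signatures, so call sites compile unchanged; a hedged sketch (the method choices are illustrative, not taken from the driver):

    import org.apache.commons.lang3.StringUtils

    object StringUtilsSketch {
      def main(args: Array[String]): Unit = {
        // These static helpers have the same signatures in commons-lang 2.x
        // and commons-lang3, so swapping the import is usually enough.
        println(StringUtils.isBlank("   "))                    // true
        println(StringUtils.split("a;b;c", ';').mkString(",")) // a,b,c
      }
    }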
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/execution/UDFListString.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/execution/UDFListString.java
index efd34df293..f33210ebda 100644
--- a/sql/hive/src/test/java/org/apache/spark/sql/hive/execution/UDFListString.java
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/execution/UDFListString.java
@@ -17,10 +17,10 @@
package org.apache.spark.sql.hive.execution;
-import org.apache.hadoop.hive.ql.exec.UDF;
-
import java.util.List;
-import org.apache.commons.lang.StringUtils;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.ql.exec.UDF;
public class UDFListString extends UDF {
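Here too only the imports change, reordered and pointed at lang3. Judging from the class name, the UDF joins a list into a string; assuming it uses StringUtils.join (an assumption; the method body is not shown in this diff), lang3 accepts the same Iterable-based call:

    import java.util.Arrays
    import org.apache.commons.lang3.StringUtils

    object JoinSketch {
      def main(args: Array[String]): Unit = {
        // lang3's StringUtils.join takes an Iterable just as 2.x did,
        // so a list-joining UDF needs only the import change.
        println(StringUtils.join(Arrays.asList("a", "b", "c"), ",")) // a,b,c
      }
    }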
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index e09c702c89..0538aa203c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -23,7 +23,6 @@ import scala.collection.mutable.ArrayBuffer
import org.scalatest.BeforeAndAfterEach
-import org.apache.commons.io.FileUtils
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hive.metastore.TableType
import org.apache.hadoop.hive.ql.metadata.Table
@@ -174,7 +173,7 @@ class MetastoreDataSourcesSuite extends QueryTest with BeforeAndAfterEach {
sql("SELECT * FROM jsonTable"),
Row("a", "b"))
- FileUtils.deleteDirectory(tempDir)
+ Utils.deleteRecursively(tempDir)
sparkContext.parallelize(("a1", "b1", "c1") :: Nil).toDF()
.toJSON.saveAsTextFile(tempDir.getCanonicalPath)
@@ -190,7 +189,7 @@ class MetastoreDataSourcesSuite extends QueryTest with BeforeAndAfterEach {
checkAnswer(
sql("SELECT * FROM jsonTable"),
Row("a1", "b1", "c1"))
- FileUtils.deleteDirectory(tempDir)
+ Utils.deleteRecursively(tempDir)
}
test("drop, change, recreate") {
@@ -212,7 +211,7 @@ class MetastoreDataSourcesSuite extends QueryTest with BeforeAndAfterEach {
sql("SELECT * FROM jsonTable"),
Row("a", "b"))
- FileUtils.deleteDirectory(tempDir)
+ Utils.deleteRecursively(tempDir)
sparkContext.parallelize(("a", "b", "c") :: Nil).toDF()
.toJSON.saveAsTextFile(tempDir.getCanonicalPath)
@@ -231,7 +230,7 @@ class MetastoreDataSourcesSuite extends QueryTest with BeforeAndAfterEach {
checkAnswer(
sql("SELECT * FROM jsonTable"),
Row("a", "b", "c"))
- FileUtils.deleteDirectory(tempDir)
+ Utils.deleteRecursively(tempDir)
}
test("invalidate cache and reload") {