author      zsxwing <zsxwing@gmail.com>        2015-09-19 18:22:43 -0700
committer   Yin Huai <yhuai@databricks.com>    2015-09-19 18:22:43 -0700
commit      e789000b88a6bd840f821c53f42c08b97dc02496 (patch)
tree        454cf87addc9fe93e9eab98744ce5daa4dcfb6e3 /sql/hive
parent      d83b6aae8b4357c56779cc98804eb350ab8af62d (diff)
[SPARK-10155] [SQL] Change SqlParser to object to avoid memory leak
`scala.util.parsing.combinator.Parsers` has been thread-safe since Scala 2.10 (see [SI-4929](https://issues.scala-lang.org/browse/SI-4929)), so we can change `SqlParser` to an object to avoid the memory leak.

I didn't change other subclasses of `scala.util.parsing.combinator.Parsers` because there is only one instance per SQLContext, which should not be an issue.

Author: zsxwing <zsxwing@gmail.com>

Closes #8357 from zsxwing/sql-memory-leak.
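For context, a minimal sketch of the pattern this change relies on: because the parser-combinator base trait is thread-safe on Scala 2.10+, a parser can be a shared singleton `object` instead of a fresh instance created on every call. The `SimpleParser` name and its toy grammar below are hypothetical, not part of this patch or of Spark's actual `SqlParser`.

import scala.util.parsing.combinator.RegexParsers

// Hypothetical parser: defined as an object, so a single shared instance
// serves all callers and nothing is allocated (or leaked) per parse call.
object SimpleParser extends RegexParsers {
  private def identifier: Parser[String] = """[a-zA-Z_][a-zA-Z0-9_.]*""".r

  def parseTableIdentifier(input: String): String =
    parseAll(identifier, input) match {
      case Success(result, _) => result
      case failure            => sys.error(s"cannot parse '$input': $failure")
    }
}

// Call sites then mirror the change in this patch:
//   val tableIdent = SimpleParser.parseTableIdentifier(tableName)
// instead of the previous class-based pattern:
//   val tableIdent = new SqlParser().parseTableIdentifier(tableName)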
Diffstat (limited to 'sql/hive')
-rw-r--r--   sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala            | 6
-rw-r--r--   sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala   | 4
2 files changed, 5 insertions, 5 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index d37ba5ddc2..c12a734863 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -291,12 +291,12 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
* @since 1.3.0
*/
def refreshTable(tableName: String): Unit = {
- val tableIdent = new SqlParser().parseTableIdentifier(tableName)
+ val tableIdent = SqlParser.parseTableIdentifier(tableName)
catalog.refreshTable(tableIdent)
}
protected[hive] def invalidateTable(tableName: String): Unit = {
- val tableIdent = new SqlParser().parseTableIdentifier(tableName)
+ val tableIdent = SqlParser.parseTableIdentifier(tableName)
catalog.invalidateTable(tableIdent)
}
@@ -311,7 +311,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
*/
@Experimental
def analyze(tableName: String) {
- val tableIdent = new SqlParser().parseTableIdentifier(tableName)
+ val tableIdent = SqlParser.parseTableIdentifier(tableName)
val relation = EliminateSubQueries(catalog.lookupRelation(tableIdent.toSeq))
relation match {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 0a5569b0a4..0c1b41e337 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -199,7 +199,7 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
options: Map[String, String],
isExternal: Boolean): Unit = {
createDataSourceTable(
- new SqlParser().parseTableIdentifier(tableName),
+ SqlParser.parseTableIdentifier(tableName),
userSpecifiedSchema,
partitionColumns,
provider,
@@ -375,7 +375,7 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
}
def hiveDefaultTableFilePath(tableName: String): String = {
- hiveDefaultTableFilePath(new SqlParser().parseTableIdentifier(tableName))
+ hiveDefaultTableFilePath(SqlParser.parseTableIdentifier(tableName))
}
def hiveDefaultTableFilePath(tableIdent: TableIdentifier): String = {