diff options
author | Michael Armbrust <michael@databricks.com> | 2014-07-31 11:26:43 -0700 |
---|---|---|
committer | Michael Armbrust <michael@databricks.com> | 2014-07-31 11:26:43 -0700 |
commit | 72cfb13987bab07461266905930f84619b3a0068 (patch) | |
tree | f54cb89724c43628ba5b0fc8306c9bd53f74826e /dev | |
parent | 3072b96026fa3e63e8eef780f2b04dd81f11ea27 (diff) | |
download | spark-72cfb13987bab07461266905930f84619b3a0068.tar.gz spark-72cfb13987bab07461266905930f84619b3a0068.tar.bz2 spark-72cfb13987bab07461266905930f84619b3a0068.zip |
[SPARK-2397][SQL] Deprecate LocalHiveContext
LocalHiveContext is redundant with HiveContext. The only difference is it creates `./metastore` instead of `./metastore_db`.
Author: Michael Armbrust <michael@databricks.com>
Closes #1641 from marmbrus/localHiveContext and squashes the following commits:
e5ec497 [Michael Armbrust] Add deprecation version
626e056 [Michael Armbrust] Don't remove from imports yet
905cc5f [Michael Armbrust] Merge remote-tracking branch 'apache/master' into localHiveContext
1c2727e [Michael Armbrust] Deprecate LocalHiveContext
Diffstat (limited to 'dev')
-rw-r--r-- | dev/audit-release/sbt_app_hive/src/main/scala/HiveApp.scala | 4 |
1 file changed, 2 insertions, 2 deletions
diff --git a/dev/audit-release/sbt_app_hive/src/main/scala/HiveApp.scala b/dev/audit-release/sbt_app_hive/src/main/scala/HiveApp.scala index 7257d17d10..a21410f3b9 100644 --- a/dev/audit-release/sbt_app_hive/src/main/scala/HiveApp.scala +++ b/dev/audit-release/sbt_app_hive/src/main/scala/HiveApp.scala @@ -22,7 +22,7 @@ import scala.collection.mutable.{ListBuffer, Queue} import org.apache.spark.SparkConf import org.apache.spark.SparkContext import org.apache.spark.rdd.RDD -import org.apache.spark.sql.hive.LocalHiveContext +import org.apache.spark.sql.hive.HiveContext case class Person(name: String, age: Int) @@ -34,7 +34,7 @@ object SparkSqlExample { case None => new SparkConf().setAppName("Simple Sql App") } val sc = new SparkContext(conf) - val hiveContext = new LocalHiveContext(sc) + val hiveContext = new HiveContext(sc) import hiveContext._ hql("DROP TABLE IF EXISTS src") |