aboutsummaryrefslogtreecommitdiff
path: root/sql/hive/src
diff options
context:
space:
mode:
author    Michael Armbrust <michael@databricks.com>  2014-08-02 18:27:04 -0700
committer Michael Armbrust <michael@databricks.com>  2014-08-02 18:27:15 -0700
commit    5b30e001839a29e6c4bd1fc24bfa12d9166ef10c (patch)
tree      fe499822fa58fc9416e8664b76839b4e198679a2 /sql/hive/src
parent    5ef828273deb4713a49700c56d51bdd980917cfd (diff)
download  spark-5b30e001839a29e6c4bd1fc24bfa12d9166ef10c.tar.gz
          spark-5b30e001839a29e6c4bd1fc24bfa12d9166ef10c.tar.bz2
          spark-5b30e001839a29e6c4bd1fc24bfa12d9166ef10c.zip
[SPARK-2739][SQL] Rename registerAsTable to registerTempTable
There have been user complaints that the difference between `registerAsTable` and `saveAsTable` is too subtle. This PR addresses this by renaming `registerAsTable` to `registerTempTable`, which more clearly reflects what is happening. `registerAsTable` remains, but will cause a deprecation warning. Author: Michael Armbrust <michael@databricks.com> Closes #1743 from marmbrus/registerTempTable and squashes the following commits: d031348 [Michael Armbrust] Merge remote-tracking branch 'apache/master' into registerTempTable 4dff086 [Michael Armbrust] Fix .java files too 89a2f12 [Michael Armbrust] Merge remote-tracking branch 'apache/master' into registerTempTable 0b7b71e [Michael Armbrust] Rename registerAsTable to registerTempTable (cherry picked from commit 1a8043739dc1d9435def6ea3c6341498ba52b708) Signed-off-by: Michael Armbrust <michael@databricks.com>
Diffstat (limited to 'sql/hive/src')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala      | 2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala      | 4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala      | 6
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveResolutionSuite.scala | 4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala           | 8
5 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
index 833f350215..7e323146f9 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
@@ -28,7 +28,7 @@ case class TestData(key: Int, value: String)
class InsertIntoHiveTableSuite extends QueryTest {
val testData = TestHive.sparkContext.parallelize(
(1 to 100).map(i => TestData(i, i.toString)))
- testData.registerAsTable("testData")
+ testData.registerTempTable("testData")
test("insertInto() HiveTable") {
createTable[TestData]("createAndInsertTest")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
index 10c8069a62..578f27574a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
@@ -63,7 +63,7 @@ class JavaHiveQLSuite extends FunSuite {
javaHiveCtx.hql(s"CREATE TABLE $tableName(key INT, value STRING)").count()
}
- javaHiveCtx.hql("SHOW TABLES").registerAsTable("show_tables")
+ javaHiveCtx.hql("SHOW TABLES").registerTempTable("show_tables")
assert(
javaHiveCtx
@@ -73,7 +73,7 @@ class JavaHiveQLSuite extends FunSuite {
.contains(tableName))
assertResult(Array(Array("key", "int", "None"), Array("value", "string", "None"))) {
- javaHiveCtx.hql(s"DESCRIBE $tableName").registerAsTable("describe_table")
+ javaHiveCtx.hql(s"DESCRIBE $tableName").registerTempTable("describe_table")
javaHiveCtx
.hql("SELECT result FROM describe_table")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 89cc589fb8..4ed41550cf 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -247,7 +247,7 @@ class HiveQuerySuite extends HiveComparisonTest {
TestHive.sparkContext.parallelize(
TestData(1, "str1") ::
TestData(2, "str2") :: Nil)
- testData.registerAsTable("REGisteredTABle")
+ testData.registerTempTable("REGisteredTABle")
assertResult(Array(Array(2, "str2"))) {
hql("SELECT tablealias.A, TABLEALIAS.b FROM reGisteredTABle TableAlias " +
@@ -272,7 +272,7 @@ class HiveQuerySuite extends HiveComparisonTest {
test("SPARK-2180: HAVING support in GROUP BY clauses (positive)") {
val fixture = List(("foo", 2), ("bar", 1), ("foo", 4), ("bar", 3))
.zipWithIndex.map {case Pair(Pair(value, attr), key) => HavingRow(key, value, attr)}
- TestHive.sparkContext.parallelize(fixture).registerAsTable("having_test")
+ TestHive.sparkContext.parallelize(fixture).registerTempTable("having_test")
val results =
hql("SELECT value, max(attr) AS attr FROM having_test GROUP BY value HAVING attr > 3")
.collect()
@@ -401,7 +401,7 @@ class HiveQuerySuite extends HiveComparisonTest {
TestHive.sparkContext.parallelize(
TestData(1, "str1") ::
TestData(1, "str2") :: Nil)
- testData.registerAsTable("test_describe_commands2")
+ testData.registerTempTable("test_describe_commands2")
assertResult(
Array(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveResolutionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveResolutionSuite.scala
index fb03db12a0..2455c18925 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveResolutionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveResolutionSuite.scala
@@ -54,14 +54,14 @@ class HiveResolutionSuite extends HiveComparisonTest {
test("case insensitivity with scala reflection") {
// Test resolution with Scala Reflection
TestHive.sparkContext.parallelize(Data(1, 2, Nested(1,2), Seq(Nested(1,2))) :: Nil)
- .registerAsTable("caseSensitivityTest")
+ .registerTempTable("caseSensitivityTest")
hql("SELECT a, b, A, B, n.a, n.b, n.A, n.B FROM caseSensitivityTest")
}
test("nested repeated resolution") {
TestHive.sparkContext.parallelize(Data(1, 2, Nested(1,2), Seq(Nested(1,2))) :: Nil)
- .registerAsTable("nestedRepeatedTest")
+ .registerTempTable("nestedRepeatedTest")
assert(hql("SELECT nestedArray[0].a FROM nestedRepeatedTest").collect().head(0) === 1)
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala
index 47526e3596..6545e8d7dc 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala
@@ -41,7 +41,7 @@ class HiveParquetSuite extends FunSuite with BeforeAndAfterAll with BeforeAndAft
// write test data
ParquetTestData.writeFile()
testRDD = parquetFile(ParquetTestData.testDir.toString)
- testRDD.registerAsTable("testsource")
+ testRDD.registerTempTable("testsource")
}
override def afterAll() {
@@ -67,7 +67,7 @@ class HiveParquetSuite extends FunSuite with BeforeAndAfterAll with BeforeAndAft
.map(i => Cases(i, i))
.saveAsParquetFile(tempFile.getCanonicalPath)
- parquetFile(tempFile.getCanonicalPath).registerAsTable("cases")
+ parquetFile(tempFile.getCanonicalPath).registerTempTable("cases")
hql("SELECT upper FROM cases").collect().map(_.getString(0)) === (1 to 10).map(_.toString)
hql("SELECT LOWER FROM cases").collect().map(_.getString(0)) === (1 to 10).map(_.toString)
}
@@ -86,7 +86,7 @@ class HiveParquetSuite extends FunSuite with BeforeAndAfterAll with BeforeAndAft
test("Converting Hive to Parquet Table via saveAsParquetFile") {
hql("SELECT * FROM src").saveAsParquetFile(dirname.getAbsolutePath)
- parquetFile(dirname.getAbsolutePath).registerAsTable("ptable")
+ parquetFile(dirname.getAbsolutePath).registerTempTable("ptable")
val rddOne = hql("SELECT * FROM src").collect().sortBy(_.getInt(0))
val rddTwo = hql("SELECT * from ptable").collect().sortBy(_.getInt(0))
compareRDDs(rddOne, rddTwo, "src (Hive)", Seq("key:Int", "value:String"))
@@ -94,7 +94,7 @@ class HiveParquetSuite extends FunSuite with BeforeAndAfterAll with BeforeAndAft
test("INSERT OVERWRITE TABLE Parquet table") {
hql("SELECT * FROM testsource").saveAsParquetFile(dirname.getAbsolutePath)
- parquetFile(dirname.getAbsolutePath).registerAsTable("ptable")
+ parquetFile(dirname.getAbsolutePath).registerTempTable("ptable")
// let's do three overwrites for good measure
hql("INSERT OVERWRITE TABLE ptable SELECT * FROM testsource").collect()
hql("INSERT OVERWRITE TABLE ptable SELECT * FROM testsource").collect()