about summary refs log tree commit diff
path: root/sql/core/src/main
diff options
context:
space:
mode:
author: Michael Armbrust <michael@databricks.com> 2014-08-02 18:27:04 -0700
committer: Michael Armbrust <michael@databricks.com> 2014-08-02 18:27:04 -0700
commit1a8043739dc1d9435def6ea3c6341498ba52b708 (patch)
treefe499822fa58fc9416e8664b76839b4e198679a2 /sql/core/src/main
parentd210022e96804e59e42ab902e53637e50884a9ab (diff)
downloadspark-1a8043739dc1d9435def6ea3c6341498ba52b708.tar.gz
spark-1a8043739dc1d9435def6ea3c6341498ba52b708.tar.bz2
spark-1a8043739dc1d9435def6ea3c6341498ba52b708.zip
[SPARK-2739][SQL] Rename registerAsTable to registerTempTable
There have been user complaints that the difference between `registerAsTable` and `saveAsTable` is too subtle. This PR addresses this by renaming `registerAsTable` to `registerTempTable`, which more clearly reflects what is happening. `registerAsTable` remains, but will cause a deprecation warning. Author: Michael Armbrust <michael@databricks.com> Closes #1743 from marmbrus/registerTempTable and squashes the following commits: d031348 [Michael Armbrust] Merge remote-tracking branch 'apache/master' into registerTempTable 4dff086 [Michael Armbrust] Fix .java files too 89a2f12 [Michael Armbrust] Merge remote-tracking branch 'apache/master' into registerTempTable 0b7b71e [Michael Armbrust] Rename registerAsTable to registerTempTable
Diffstat (limited to 'sql/core/src/main')
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala | 4
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/SchemaRDD.scala | 2
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/SchemaRDDLike.scala | 5
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala | 2
4 files changed, 8 insertions, 5 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index 33931e5d99..567f4dca99 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -116,7 +116,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
* // |-- name: string (nullable = false)
* // |-- age: integer (nullable = true)
*
- * peopleSchemaRDD.registerAsTable("people")
+ * peopleSchemaRDD.registerTempTable("people")
* sqlContext.sql("select name from people").collect.foreach(println)
* }}}
*
@@ -212,7 +212,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
* import sqlContext._
*
* case class Person(name: String, age: Int)
- * createParquetFile[Person]("path/to/file.parquet").registerAsTable("people")
+ * createParquetFile[Person]("path/to/file.parquet").registerTempTable("people")
* sql("INSERT INTO people SELECT 'michael', 29")
* }}}
*
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDD.scala
index d34f62dc88..57df79321b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDD.scala
@@ -67,7 +67,7 @@ import org.apache.spark.api.java.JavaRDD
* val rdd = sc.parallelize((1 to 100).map(i => Record(i, s"val_$i")))
* // Any RDD containing case classes can be registered as a table. The schema of the table is
* // automatically inferred using scala reflection.
- * rdd.registerAsTable("records")
+ * rdd.registerTempTable("records")
*
* val results: SchemaRDD = sql("SELECT * FROM records")
* }}}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDDLike.scala b/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDDLike.scala
index 6a20def475..2f3033a5f9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDDLike.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDDLike.scala
@@ -83,10 +83,13 @@ private[sql] trait SchemaRDDLike {
*
* @group schema
*/
- def registerAsTable(tableName: String): Unit = {
+ def registerTempTable(tableName: String): Unit = {
sqlContext.registerRDDAsTable(baseSchemaRDD, tableName)
}
+ @deprecated("Use registerTempTable instead of registerAsTable.", "1.1")
+ def registerAsTable(tableName: String): Unit = registerTempTable(tableName)
+
/**
* :: Experimental ::
* Adds the rows from this RDD to the specified table, optionally overwriting the existing data.
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
index ae45193ed1..dbaa16e8b0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
@@ -52,7 +52,7 @@ class JavaSQLContext(val sqlContext: SQLContext) extends UDFRegistration {
* {{{
* JavaSQLContext sqlCtx = new JavaSQLContext(...)
*
- * sqlCtx.createParquetFile(Person.class, "path/to/file.parquet").registerAsTable("people")
+ * sqlCtx.createParquetFile(Person.class, "path/to/file.parquet").registerTempTable("people")
* sqlCtx.sql("INSERT INTO people SELECT 'michael', 29")
* }}}
*