about summary refs log tree commit diff
path: root/sql
diff options
context:
space:
mode:
authorAndrew Or <andrew@databricks.com>2016-02-04 12:20:18 -0800
committerReynold Xin <rxin@databricks.com>2016-02-04 12:20:18 -0800
commitbd38dd6f75c4af0f8f32bb21a82da53fffa5e825 (patch)
treed86d978ae472f1a793f7ad2bd16c71e2b7237076 /sql
parentc756bda477f458ba4aad7fdb2026263507e0ad9b (diff)
downloadspark-bd38dd6f75c4af0f8f32bb21a82da53fffa5e825.tar.gz
spark-bd38dd6f75c4af0f8f32bb21a82da53fffa5e825.tar.bz2
spark-bd38dd6f75c4af0f8f32bb21a82da53fffa5e825.zip
[SPARK-13079][SQL] InMemoryCatalog follow-ups
This patch incorporates review feedback from #11069, which is already merged. Author: Andrew Or <andrew@databricks.com> Closes #11080 from andrewor14/catalog-follow-ups.
Diffstat (limited to 'sql')
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala | 15
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala | 12
2 files changed, 22 insertions, 5 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
index b4d7dd2f4e..56aaa6bc6c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
@@ -39,6 +39,9 @@ abstract class Catalog {
def dropDatabase(db: String, ignoreIfNotExists: Boolean, cascade: Boolean): Unit
+ /**
+ * Alter an existing database. This operation does not support renaming.
+ */
def alterDatabase(db: String, dbDefinition: Database): Unit
def getDatabase(db: String): Database
@@ -57,6 +60,9 @@ abstract class Catalog {
def renameTable(db: String, oldName: String, newName: String): Unit
+ /**
+ * Alter an existing table. This operation does not support renaming.
+ */
def alterTable(db: String, table: String, tableDefinition: Table): Unit
def getTable(db: String, table: String): Table
@@ -81,6 +87,9 @@ abstract class Catalog {
parts: Seq[PartitionSpec],
ignoreIfNotExists: Boolean): Unit
+ /**
+ * Alter an existing table partition and optionally override its spec.
+ */
def alterPartition(
db: String,
table: String,
@@ -100,6 +109,9 @@ abstract class Catalog {
def dropFunction(db: String, funcName: String): Unit
+ /**
+ * Alter an existing function and optionally override its name.
+ */
def alterFunction(db: String, funcName: String, funcDefinition: Function): Unit
def getFunction(db: String, funcName: String): Function
@@ -194,5 +206,8 @@ case class Database(
object Catalog {
+ /**
+ * Specifications of a table partition indexed by column name.
+ */
type PartitionSpec = Map[String, String]
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
index 0d8434323f..45c5ceecb0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
@@ -27,10 +27,10 @@ import org.apache.spark.sql.AnalysisException
* Implementations of the [[Catalog]] interface can create test suites by extending this.
*/
abstract class CatalogTestCases extends SparkFunSuite {
- private val storageFormat = StorageFormat("usa", "$", "zzz", "serde", Map.empty[String, String])
- private val part1 = TablePartition(Map[String, String]("a" -> "1"), storageFormat)
- private val part2 = TablePartition(Map[String, String]("b" -> "2"), storageFormat)
- private val part3 = TablePartition(Map[String, String]("c" -> "3"), storageFormat)
+ private val storageFormat = StorageFormat("usa", "$", "zzz", "serde", Map())
+ private val part1 = TablePartition(Map("a" -> "1"), storageFormat)
+ private val part2 = TablePartition(Map("b" -> "2"), storageFormat)
+ private val part3 = TablePartition(Map("c" -> "3"), storageFormat)
private val funcClass = "org.apache.spark.myFunc"
protected def newEmptyCatalog(): Catalog
@@ -42,6 +42,8 @@ abstract class CatalogTestCases extends SparkFunSuite {
* db2
* - tbl1
* - tbl2
+ * - part1
+ * - part2
* - func1
*/
private def newBasicCatalog(): Catalog = {
@@ -50,8 +52,8 @@ abstract class CatalogTestCases extends SparkFunSuite {
catalog.createDatabase(newDb("db2"), ignoreIfExists = false)
catalog.createTable("db2", newTable("tbl1"), ignoreIfExists = false)
catalog.createTable("db2", newTable("tbl2"), ignoreIfExists = false)
- catalog.createFunction("db2", newFunc("func1"), ignoreIfExists = false)
catalog.createPartitions("db2", "tbl2", Seq(part1, part2), ignoreIfExists = false)
+ catalog.createFunction("db2", newFunc("func1"), ignoreIfExists = false)
catalog
}