about summary refs log tree commit diff
diff options
context:
space:
mode:
authorSean Zhong <seanzhong@databricks.com>2016-09-06 10:50:07 +0800
committerWenchen Fan <wenchen@databricks.com>2016-09-06 10:50:07 +0800
commitafb3d5d301d004fd748ad305b3d72066af4ebb6c (patch)
tree3b62d08693555454f8f74ab4cbdcb5c221bcf262
parent8d08f43d09157b98e559c0be6ce6fd571a35e0d1 (diff)
downloadspark-afb3d5d301d004fd748ad305b3d72066af4ebb6c.tar.gz
spark-afb3d5d301d004fd748ad305b3d72066af4ebb6c.tar.bz2
spark-afb3d5d301d004fd748ad305b3d72066af4ebb6c.zip
[SPARK-17369][SQL] MetastoreRelation toJSON throws AssertException due to missing otherCopyArgs
## What changes were proposed in this pull request?

`TreeNode.toJSON` requires a subclass to explicitly override `otherCopyArgs` to include currying construction arguments; otherwise it reports an AssertException stating that the count of construction argument values doesn't match the count of construction argument names. Class `MetastoreRelation` has a currying construction parameter `client: HiveClient`, but Spark forgot to add it to the list of `otherCopyArgs`.

## How was this patch tested?

Unit tests.

Author: Sean Zhong <seanzhong@databricks.com>

Closes #14928 from clockfly/metastore_relation_toJSON.
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala2
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreRelationSuite.scala39
2 files changed, 40 insertions, 1 deletion
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
index 0bfdc137fa..33f0ecff63 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
@@ -59,7 +59,7 @@ private[hive] case class MetastoreRelation(
Objects.hashCode(databaseName, tableName, output)
}
- override protected def otherCopyArgs: Seq[AnyRef] = catalogTable :: sparkSession :: Nil
+ override protected def otherCopyArgs: Seq[AnyRef] = catalogTable :: client :: sparkSession :: Nil
private def toHiveColumn(c: StructField): FieldSchema = {
new FieldSchema(c.name, c.dataType.catalogString, c.getComment.orNull)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreRelationSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreRelationSuite.scala
new file mode 100644
index 0000000000..2f3055dcac
--- /dev/null
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreRelationSuite.scala
@@ -0,0 +1,39 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.spark.sql.hive
+
+import org.apache.spark.SparkFunSuite
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
+import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
+
+class MetastoreRelationSuite extends SparkFunSuite {
+ test("makeCopy and toJSON should work") {
+ val table = CatalogTable(
+ identifier = TableIdentifier("test", Some("db")),
+ tableType = CatalogTableType.VIEW,
+ storage = CatalogStorageFormat.empty,
+ schema = StructType(StructField("a", IntegerType, true) :: Nil))
+ val relation = MetastoreRelation("db", "test")(table, null, null)
+
+ // No exception should be thrown
+ relation.makeCopy(Array("db", "test"))
+ // No exception should be thrown
+ relation.toJSON
+ }
+}