aboutsummaryrefslogtreecommitdiff
path: root/sql/hive
diff options
context:
space:
mode:
authorCheng Lian <lian@databricks.com>2015-02-06 15:23:42 -0800
committerCheng Lian <lian@databricks.com>2015-02-06 15:23:42 -0800
commitc4021401e326cd5a412a70425f5c75405284880e (patch)
tree15e3bdf5381b6ae7b92fc9158c4a9064cbab8dc5 /sql/hive
parent76c4bf59f6544b9c02d99fd18436427cbce632e6 (diff)
downloadspark-c4021401e326cd5a412a70425f5c75405284880e.tar.gz
spark-c4021401e326cd5a412a70425f5c75405284880e.tar.bz2
spark-c4021401e326cd5a412a70425f5c75405284880e.zip
[SQL] [Minor] HiveParquetSuite was disabled by mistake, re-enable them
<!-- Reviewable:start --> [<img src="https://reviewable.io/review_button.png" height=40 alt="Review on Reviewable"/>](https://reviewable.io/reviews/apache/spark/4440) <!-- Reviewable:end --> Author: Cheng Lian <lian@databricks.com> Closes #4440 from liancheng/parquet-oops and squashes the following commits: f21ede4 [Cheng Lian] HiveParquetSuite was disabled by mistake, re-enable them.
Diffstat (limited to 'sql/hive')
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala13
1 file changed, 10 insertions, 3 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala
index eae69af586..e89b4489f1 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.parquet
-import org.apache.spark.sql.QueryTest
+import org.apache.spark.sql.{SQLConf, QueryTest}
import org.apache.spark.sql.catalyst.expressions.Row
import org.apache.spark.sql.hive.test.TestHive
@@ -64,8 +64,7 @@ class HiveParquetSuite extends QueryTest with ParquetTest {
}
}
- // TODO Re-enable this after data source insertion API is merged
- ignore(s"$prefix: INSERT OVERWRITE TABLE Parquet table") {
+ test(s"$prefix: INSERT OVERWRITE TABLE Parquet table") {
withParquetTable((1 to 10).map(i => (i, s"val_$i")), "t") {
withTempPath { file =>
sql("SELECT * FROM t LIMIT 1").saveAsParquetFile(file.getCanonicalPath)
@@ -81,4 +80,12 @@ class HiveParquetSuite extends QueryTest with ParquetTest {
}
}
}
+
+ withSQLConf(SQLConf.PARQUET_USE_DATA_SOURCE_API -> "true") {
+ run("Parquet data source enabled")
+ }
+
+ withSQLConf(SQLConf.PARQUET_USE_DATA_SOURCE_API -> "false") {
+ run("Parquet data source disabled")
+ }
}