author    hyukjinkwon <gurwls223@gmail.com>	2017-02-06 23:10:05 +0800
committer Wenchen Fan <wenchen@databricks.com>	2017-02-06 23:10:05 +0800
commit    0f16ff5b0ec8cd828774ba5ddb276d7b06dbe273 (patch)
tree      a6fa93691490efb63708f1ef7331f8cfc0a27365 /sql
parent    7beb227cc8a4674e24cb1aaa278287ecc8194e5d (diff)
[SPARK-17213][SQL][FOLLOWUP] Re-enable Parquet filter tests for binary and string
## What changes were proposed in this pull request?

This PR proposes to re-enable the tests for Parquet filter pushdown with binary and string. These were disabled in https://github.com/apache/spark/pull/16106 due to a Parquet issue, and are revived in https://github.com/apache/spark/pull/16791 after upgrading Parquet to 1.8.2.

## How was this patch tested?

Manually tested `ParquetFilterSuite` via IDE.

Author: hyukjinkwon <gurwls223@gmail.com>

Closes #16817 from HyukjinKwon/SPARK-17213.
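For readers unfamiliar with what these tests exercise, below is a minimal, standalone sketch (not taken from this patch) of Parquet filter pushdown on a string column: write a small Parquet file, filter on the string column, and inspect the physical plan for pushed filters. The `SparkSession` setup, the `/tmp/pushdown-demo` path, and the column name `_1` are illustrative assumptions.

```scala
// Hedged sketch: demonstrates string-filter pushdown against Parquet.
// Names and paths are illustrative only, not part of this patch.
import org.apache.spark.sql.SparkSession

object ParquetStringPushdownSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("parquet-string-pushdown-sketch")
      .getOrCreate()
    import spark.implicits._

    val path = "/tmp/pushdown-demo"  // illustrative output location
    // Write four rows with a single string column named "_1",
    // mirroring the shape of the data used in ParquetFilterSuite.
    (1 to 4).map(_.toString).toDF("_1").write.mode("overwrite").parquet(path)

    // With spark.sql.parquet.filterPushdown enabled (the default), the
    // equality predicate on the string column should be pushed into the
    // Parquet scan and appear under "PushedFilters" in the plan.
    val filtered = spark.read.parquet(path).filter($"_1" === "1")
    filtered.explain()
    filtered.show()

    spark.stop()
  }
}
```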
Diffstat (limited to 'sql')
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala | 6
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
index fa046c808e..9a3328fcec 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
@@ -229,8 +229,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContex
}
}
- // See SPARK-17213: https://issues.apache.org/jira/browse/SPARK-17213
- ignore("filter pushdown - string") {
+ test("filter pushdown - string") {
withParquetDataFrame((1 to 4).map(i => Tuple1(i.toString))) { implicit df =>
checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(
@@ -258,8 +257,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContex
}
}
- // See SPARK-17213: https://issues.apache.org/jira/browse/SPARK-17213
- ignore("filter pushdown - binary") {
+ test("filter pushdown - binary") {
implicit class IntToBinary(int: Int) {
def b: Array[Byte] = int.toString.getBytes(StandardCharsets.UTF_8)
}
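As a side note on the binary test above: the `IntToBinary` implicit converts an `Int` into its UTF-8 byte representation so that binary rows and expected predicate values can be written tersely (e.g. `1.b`). A self-contained sketch of the same idiom follows; the enclosing object and the `main` method are illustrative, not part of the suite.

```scala
import java.nio.charset.StandardCharsets

// Sketch of the IntToBinary idiom: lets binary test values be written as 1.b, 2.b, ...
object IntToBinarySketch {
  implicit class IntToBinary(int: Int) {
    def b: Array[Byte] = int.toString.getBytes(StandardCharsets.UTF_8)
  }

  def main(args: Array[String]): Unit = {
    // Build four binary values the same way the binary pushdown test builds its rows.
    val values: Seq[Array[Byte]] = (1 to 4).map(_.b)
    values.foreach(v => println(new String(v, StandardCharsets.UTF_8)))
  }
}
```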