about summary refs log tree commit diff
path: root/sql/core
diff options
context:
space:
mode:
authorYin Huai <yhuai@databricks.com>2015-10-30 20:05:07 -0700
committerYin Huai <yhuai@databricks.com>2015-10-30 20:05:07 -0700
commit3c471885dc4f86bea95ab542e0d48d22ae748404 (patch)
treefbb600c5ae2d1c39dc6238c9885c2e85f7f83a7f /sql/core
parent69b9e4b3c2f929e3df55f5e71875c03bb9712948 (diff)
downloadspark-3c471885dc4f86bea95ab542e0d48d22ae748404.tar.gz
spark-3c471885dc4f86bea95ab542e0d48d22ae748404.tar.bz2
spark-3c471885dc4f86bea95ab542e0d48d22ae748404.zip
[SPARK-11434][SPARK-11103][SQL] Fix test ": Filter applied on merged Parquet schema with new column fails"
https://issues.apache.org/jira/browse/SPARK-11434 Author: Yin Huai <yhuai@databricks.com> Closes #9387 from yhuai/SPARK-11434.
Diffstat (limited to 'sql/core')
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala6
1 file changed, 3 insertions, 3 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
index b2101beb92..f88ddc77a6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
@@ -323,15 +323,15 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContex
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true",
SQLConf.PARQUET_SCHEMA_MERGING_ENABLED.key -> "true") {
withTempPath { dir =>
- var pathOne = s"${dir.getCanonicalPath}/table1"
+ val pathOne = s"${dir.getCanonicalPath}/table1"
(1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(pathOne)
- var pathTwo = s"${dir.getCanonicalPath}/table2"
+ val pathTwo = s"${dir.getCanonicalPath}/table2"
(1 to 3).map(i => (i, i.toString)).toDF("c", "b").write.parquet(pathTwo)
// If the "c = 1" filter gets pushed down, this query will throw an exception which
// Parquet emits. This is a Parquet issue (PARQUET-389).
checkAnswer(
- sqlContext.read.parquet(pathOne, pathTwo).filter("c = 1"),
+ sqlContext.read.parquet(pathOne, pathTwo).filter("c = 1").selectExpr("c", "b", "a"),
(1 to 1).map(i => Row(i, i.toString, null)))
}
}