path: root/sql
author     Yin Huai <huai@cse.ohio-state.edu>        2014-07-31 13:05:24 -0700
committer  Michael Armbrust <michael@databricks.com> 2014-07-31 13:05:24 -0700
commit     49b361298b09d415de1857846367913495aecfa6 (patch)
tree       73ac18986560c80d2544339c88628632b9616c02 /sql
parent     dc0865bc7e119fe507061c27069c17523b87dfea (diff)
download   spark-49b361298b09d415de1857846367913495aecfa6.tar.gz
           spark-49b361298b09d415de1857846367913495aecfa6.tar.bz2
           spark-49b361298b09d415de1857846367913495aecfa6.zip
[SPARK-2523] [SQL] Hadoop table scan bug fixing (fix failing Jenkins maven test)
This PR tries to resolve the broken Jenkins maven test issue introduced by #1439. Now, we create a single query test to run both the setup work and the test query.

Author: Yin Huai <huai@cse.ohio-state.edu>

Closes #1669 from yhuai/SPARK-2523-fixTest and squashes the following commits:

358af1a [Yin Huai] Make partition_based_table_scan_with_different_serde run atomically.
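For context on the fix: the new test below folds the CREATE TABLE, INSERT, and ALTER TABLE setup plus the final SELECT into one multi-statement script passed to createQueryTest, so they run atomically within a single test case. A minimal sketch of that pattern follows, assuming a hypothetical runHiveQl stand-in for however the harness actually submits each statement (the real HiveComparisonTest may split and compare results differently):

// Sketch only: runs a multi-statement HiveQL script in order within one test.
// `runHiveQl` is a hypothetical placeholder, not the actual TestHive API.
object AtomicQueryTestSketch {
  def runHiveQl(statement: String): Seq[String] = {
    println(s"executing: $statement")   // the real suite would go through TestHive
    Seq.empty
  }

  // Split the script on ';' and execute each non-empty statement in sequence,
  // so the setup DDL/DML and the final SELECT succeed or fail as one unit.
  def runAtomically(script: String): Seq[Seq[String]] =
    script.split(";").map(_.trim).filter(_.nonEmpty).map(runHiveQl).toSeq

  def main(args: Array[String]): Unit = {
    val script =
      """
        |CREATE TABLE part_scan_test (key STRING, value STRING) PARTITIONED BY (ds STRING);
        |SELECT * FROM part_scan_test;
      """.stripMargin
    runAtomically(script)
  }
}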
Diffstat (limited to 'sql')
-rw-r--r-- sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-0-1436cccda63b78dd6e43a399da6cc474 | 0
-rw-r--r-- sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-1-8d9bf54373f45bc35f8cb6e82771b154 | 0
-rw-r--r-- sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-2-7816c17905012cf381abf93d230faa8d | 0
-rw-r--r-- sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-3-90089a6db3c3d8ee5ff5ea6b9153b3cc | 0
-rw-r--r-- sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-4-8caed2a6e80250a6d38a59388679c298 (renamed from sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-0-8caed2a6e80250a6d38a59388679c298) | 0
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala | 45
6 files changed, 19 insertions, 26 deletions
diff --git a/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-0-1436cccda63b78dd6e43a399da6cc474 b/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-0-1436cccda63b78dd6e43a399da6cc474
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-0-1436cccda63b78dd6e43a399da6cc474
diff --git a/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-1-8d9bf54373f45bc35f8cb6e82771b154 b/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-1-8d9bf54373f45bc35f8cb6e82771b154
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-1-8d9bf54373f45bc35f8cb6e82771b154
diff --git a/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-2-7816c17905012cf381abf93d230faa8d b/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-2-7816c17905012cf381abf93d230faa8d
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-2-7816c17905012cf381abf93d230faa8d
diff --git a/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-3-90089a6db3c3d8ee5ff5ea6b9153b3cc b/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-3-90089a6db3c3d8ee5ff5ea6b9153b3cc
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-3-90089a6db3c3d8ee5ff5ea6b9153b3cc
diff --git a/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-0-8caed2a6e80250a6d38a59388679c298 b/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-4-8caed2a6e80250a6d38a59388679c298
index f369f21e18..f369f21e18 100644
--- a/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-0-8caed2a6e80250a6d38a59388679c298
+++ b/sql/hive/src/test/resources/golden/partition_based_table_scan_with_different_serde-4-8caed2a6e80250a6d38a59388679c298
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
index bcb00f871d..c5736723b4 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
@@ -17,32 +17,25 @@
package org.apache.spark.sql.hive.execution
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
-
-import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.sql.hive.test.TestHive
-
class HiveTableScanSuite extends HiveComparisonTest {
- // MINOR HACK: You must run a query before calling reset the first time.
- TestHive.hql("SHOW TABLES")
- TestHive.reset()
-
- TestHive.hql("""CREATE TABLE part_scan_test (key STRING, value STRING) PARTITIONED BY (ds STRING)
- | ROW FORMAT SERDE
- | 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe'
- | STORED AS RCFILE
- """.stripMargin)
- TestHive.hql("""FROM src
- | INSERT INTO TABLE part_scan_test PARTITION (ds='2010-01-01')
- | SELECT 100,100 LIMIT 1
- """.stripMargin)
- TestHive.hql("""ALTER TABLE part_scan_test SET SERDE
- | 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
- """.stripMargin)
- TestHive.hql("""FROM src INSERT INTO TABLE part_scan_test PARTITION (ds='2010-01-02')
- | SELECT 200,200 LIMIT 1
- """.stripMargin)
- createQueryTest("partition_based_table_scan_with_different_serde",
- "SELECT * from part_scan_test", false)
+ createQueryTest("partition_based_table_scan_with_different_serde",
+ """
+ |CREATE TABLE part_scan_test (key STRING, value STRING) PARTITIONED BY (ds STRING)
+ |ROW FORMAT SERDE
+ |'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe'
+ |STORED AS RCFILE;
+ |
+ |FROM src
+ |INSERT INTO TABLE part_scan_test PARTITION (ds='2010-01-01')
+ |SELECT 100,100 LIMIT 1;
+ |
+ |ALTER TABLE part_scan_test SET SERDE
+ |'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe';
+ |
+ |FROM src INSERT INTO TABLE part_scan_test PARTITION (ds='2010-01-02')
+ |SELECT 200,200 LIMIT 1;
+ |
+ |SELECT * from part_scan_test;
+ """.stripMargin)
}