about summary refs log tree commit diff
path: root/sql
diff options
context:
space:
mode:
author: Wenchen Fan <cloud0fan@outlook.com> 2015-08-07 11:28:43 -0700
committer: Michael Armbrust <michael@databricks.com> 2015-08-07 11:28:43 -0700
commit2432c2e239f66049a7a7d7e0591204abcc993f1a (patch)
treea20991c4bd7497509feeef255aba0907cf300b20 /sql
parent76eaa701833a2ff23b50147d70ced41e85719572 (diff)
downloadspark-2432c2e239f66049a7a7d7e0591204abcc993f1a.tar.gz
spark-2432c2e239f66049a7a7d7e0591204abcc993f1a.tar.bz2
spark-2432c2e239f66049a7a7d7e0591204abcc993f1a.zip
[SPARK-8382] [SQL] Improve Analysis Unit test framework
Author: Wenchen Fan <cloud0fan@outlook.com> Closes #8025 from cloud-fan/analysis and squashes the following commits: 51461b1 [Wenchen Fan] move test file to test folder ec88ace [Wenchen Fan] Improve Analysis Unit test framework
Diffstat (limited to 'sql')
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala | 48
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala | 55
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala | 33
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TestRelations.scala | 51
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala | 19
5 files changed, 79 insertions(+), 127 deletions(-)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
index 26935c6e3b..63b475b636 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
@@ -42,8 +42,8 @@ case class UnresolvedTestPlan() extends LeafNode {
override def output: Seq[Attribute] = Nil
}
-class AnalysisErrorSuite extends SparkFunSuite with BeforeAndAfter {
- import AnalysisSuite._
+class AnalysisErrorSuite extends AnalysisTest with BeforeAndAfter {
+ import TestRelations._
def errorTest(
name: String,
@@ -51,15 +51,7 @@ class AnalysisErrorSuite extends SparkFunSuite with BeforeAndAfter {
errorMessages: Seq[String],
caseSensitive: Boolean = true): Unit = {
test(name) {
- val error = intercept[AnalysisException] {
- if (caseSensitive) {
- caseSensitiveAnalyze(plan)
- } else {
- caseInsensitiveAnalyze(plan)
- }
- }
-
- errorMessages.foreach(m => assert(error.getMessage.toLowerCase.contains(m.toLowerCase)))
+ assertAnalysisError(plan, errorMessages, caseSensitive)
}
}
@@ -69,21 +61,21 @@ class AnalysisErrorSuite extends SparkFunSuite with BeforeAndAfter {
"single invalid type, single arg",
testRelation.select(TestFunction(dateLit :: Nil, IntegerType :: Nil).as('a)),
"cannot resolve" :: "testfunction" :: "argument 1" :: "requires int type" ::
- "'null' is of date type" ::Nil)
+ "'null' is of date type" :: Nil)
errorTest(
"single invalid type, second arg",
testRelation.select(
TestFunction(dateLit :: dateLit :: Nil, DateType :: IntegerType :: Nil).as('a)),
"cannot resolve" :: "testfunction" :: "argument 2" :: "requires int type" ::
- "'null' is of date type" ::Nil)
+ "'null' is of date type" :: Nil)
errorTest(
"multiple invalid type",
testRelation.select(
TestFunction(dateLit :: dateLit :: Nil, IntegerType :: IntegerType :: Nil).as('a)),
"cannot resolve" :: "testfunction" :: "argument 1" :: "argument 2" ::
- "requires int type" :: "'null' is of date type" ::Nil)
+ "requires int type" :: "'null' is of date type" :: Nil)
errorTest(
"unresolved window function",
@@ -169,11 +161,7 @@ class AnalysisErrorSuite extends SparkFunSuite with BeforeAndAfter {
assert(plan.resolved)
- val message = intercept[AnalysisException] {
- caseSensitiveAnalyze(plan)
- }.getMessage
-
- assert(message.contains("resolved attribute(s) a#1 missing from a#2"))
+ assertAnalysisError(plan, "resolved attribute(s) a#1 missing from a#2" :: Nil)
}
test("error test for self-join") {
@@ -194,10 +182,8 @@ class AnalysisErrorSuite extends SparkFunSuite with BeforeAndAfter {
AttributeReference("a", BinaryType)(exprId = ExprId(2)),
AttributeReference("b", IntegerType)(exprId = ExprId(1))))
- val error = intercept[AnalysisException] {
- caseSensitiveAnalyze(plan)
- }
- assert(error.message.contains("binary type expression a cannot be used in grouping expression"))
+ assertAnalysisError(plan,
+ "binary type expression a cannot be used in grouping expression" :: Nil)
val plan2 =
Aggregate(
@@ -207,10 +193,8 @@ class AnalysisErrorSuite extends SparkFunSuite with BeforeAndAfter {
AttributeReference("a", MapType(IntegerType, StringType))(exprId = ExprId(2)),
AttributeReference("b", IntegerType)(exprId = ExprId(1))))
- val error2 = intercept[AnalysisException] {
- caseSensitiveAnalyze(plan2)
- }
- assert(error2.message.contains("map type expression a cannot be used in grouping expression"))
+ assertAnalysisError(plan2,
+ "map type expression a cannot be used in grouping expression" :: Nil)
}
test("Join can't work on binary and map types") {
@@ -226,10 +210,7 @@ class AnalysisErrorSuite extends SparkFunSuite with BeforeAndAfter {
Some(EqualTo(AttributeReference("a", BinaryType)(exprId = ExprId(2)),
AttributeReference("c", BinaryType)(exprId = ExprId(4)))))
- val error = intercept[AnalysisException] {
- caseSensitiveAnalyze(plan)
- }
- assert(error.message.contains("binary type expression a cannot be used in join conditions"))
+ assertAnalysisError(plan, "binary type expression a cannot be used in join conditions" :: Nil)
val plan2 =
Join(
@@ -243,9 +224,6 @@ class AnalysisErrorSuite extends SparkFunSuite with BeforeAndAfter {
Some(EqualTo(AttributeReference("a", MapType(IntegerType, StringType))(exprId = ExprId(2)),
AttributeReference("c", MapType(IntegerType, StringType))(exprId = ExprId(4)))))
- val error2 = intercept[AnalysisException] {
- caseSensitiveAnalyze(plan2)
- }
- assert(error2.message.contains("map type expression a cannot be used in join conditions"))
+ assertAnalysisError(plan2, "map type expression a cannot be used in join conditions" :: Nil)
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index 221b4e92f0..c944bc69e2 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -24,61 +24,8 @@ import org.apache.spark.sql.catalyst.SimpleCatalystConf
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
-// todo: remove this and use AnalysisTest instead.
-object AnalysisSuite {
- val caseSensitiveConf = new SimpleCatalystConf(true)
- val caseInsensitiveConf = new SimpleCatalystConf(false)
-
- val caseSensitiveCatalog = new SimpleCatalog(caseSensitiveConf)
- val caseInsensitiveCatalog = new SimpleCatalog(caseInsensitiveConf)
-
- val caseSensitiveAnalyzer =
- new Analyzer(caseSensitiveCatalog, EmptyFunctionRegistry, caseSensitiveConf) {
- override val extendedResolutionRules = EliminateSubQueries :: Nil
- }
- val caseInsensitiveAnalyzer =
- new Analyzer(caseInsensitiveCatalog, EmptyFunctionRegistry, caseInsensitiveConf) {
- override val extendedResolutionRules = EliminateSubQueries :: Nil
- }
-
- def caseSensitiveAnalyze(plan: LogicalPlan): Unit =
- caseSensitiveAnalyzer.checkAnalysis(caseSensitiveAnalyzer.execute(plan))
-
- def caseInsensitiveAnalyze(plan: LogicalPlan): Unit =
- caseInsensitiveAnalyzer.checkAnalysis(caseInsensitiveAnalyzer.execute(plan))
-
- val testRelation = LocalRelation(AttributeReference("a", IntegerType, nullable = true)())
- val testRelation2 = LocalRelation(
- AttributeReference("a", StringType)(),
- AttributeReference("b", StringType)(),
- AttributeReference("c", DoubleType)(),
- AttributeReference("d", DecimalType(10, 2))(),
- AttributeReference("e", ShortType)())
-
- val nestedRelation = LocalRelation(
- AttributeReference("top", StructType(
- StructField("duplicateField", StringType) ::
- StructField("duplicateField", StringType) ::
- StructField("differentCase", StringType) ::
- StructField("differentcase", StringType) :: Nil
- ))())
-
- val nestedRelation2 = LocalRelation(
- AttributeReference("top", StructType(
- StructField("aField", StringType) ::
- StructField("bField", StringType) ::
- StructField("cField", StringType) :: Nil
- ))())
-
- val listRelation = LocalRelation(
- AttributeReference("list", ArrayType(IntegerType))())
-
- caseSensitiveCatalog.registerTable(Seq("TaBlE"), testRelation)
- caseInsensitiveCatalog.registerTable(Seq("TaBlE"), testRelation)
-}
-
-
class AnalysisSuite extends AnalysisTest {
+ import TestRelations._
test("union project *") {
val plan = (1 to 100)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
index fdb4f28950..ee1f8f5425 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
@@ -17,40 +17,11 @@
package org.apache.spark.sql.catalyst.analysis
-import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.SimpleCatalystConf
-import org.apache.spark.sql.types._
trait AnalysisTest extends PlanTest {
- val testRelation = LocalRelation(AttributeReference("a", IntegerType, nullable = true)())
-
- val testRelation2 = LocalRelation(
- AttributeReference("a", StringType)(),
- AttributeReference("b", StringType)(),
- AttributeReference("c", DoubleType)(),
- AttributeReference("d", DecimalType(10, 2))(),
- AttributeReference("e", ShortType)())
-
- val nestedRelation = LocalRelation(
- AttributeReference("top", StructType(
- StructField("duplicateField", StringType) ::
- StructField("duplicateField", StringType) ::
- StructField("differentCase", StringType) ::
- StructField("differentcase", StringType) :: Nil
- ))())
-
- val nestedRelation2 = LocalRelation(
- AttributeReference("top", StructType(
- StructField("aField", StringType) ::
- StructField("bField", StringType) ::
- StructField("cField", StringType) :: Nil
- ))())
-
- val listRelation = LocalRelation(
- AttributeReference("list", ArrayType(IntegerType))())
val (caseSensitiveAnalyzer, caseInsensitiveAnalyzer) = {
val caseSensitiveConf = new SimpleCatalystConf(true)
@@ -59,8 +30,8 @@ trait AnalysisTest extends PlanTest {
val caseSensitiveCatalog = new SimpleCatalog(caseSensitiveConf)
val caseInsensitiveCatalog = new SimpleCatalog(caseInsensitiveConf)
- caseSensitiveCatalog.registerTable(Seq("TaBlE"), testRelation)
- caseInsensitiveCatalog.registerTable(Seq("TaBlE"), testRelation)
+ caseSensitiveCatalog.registerTable(Seq("TaBlE"), TestRelations.testRelation)
+ caseInsensitiveCatalog.registerTable(Seq("TaBlE"), TestRelations.testRelation)
new Analyzer(caseSensitiveCatalog, EmptyFunctionRegistry, caseSensitiveConf) {
override val extendedResolutionRules = EliminateSubQueries :: Nil
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TestRelations.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TestRelations.scala
new file mode 100644
index 0000000000..05b870705e
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TestRelations.scala
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.analysis
+
+import org.apache.spark.sql.catalyst.expressions.AttributeReference
+import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
+import org.apache.spark.sql.types._
+
+object TestRelations {
+ val testRelation = LocalRelation(AttributeReference("a", IntegerType, nullable = true)())
+
+ val testRelation2 = LocalRelation(
+ AttributeReference("a", StringType)(),
+ AttributeReference("b", StringType)(),
+ AttributeReference("c", DoubleType)(),
+ AttributeReference("d", DecimalType(10, 2))(),
+ AttributeReference("e", ShortType)())
+
+ val nestedRelation = LocalRelation(
+ AttributeReference("top", StructType(
+ StructField("duplicateField", StringType) ::
+ StructField("duplicateField", StringType) ::
+ StructField("differentCase", StringType) ::
+ StructField("differentcase", StringType) :: Nil
+ ))())
+
+ val nestedRelation2 = LocalRelation(
+ AttributeReference("top", StructType(
+ StructField("aField", StringType) ::
+ StructField("bField", StringType) ::
+ StructField("cField", StringType) :: Nil
+ ))())
+
+ val listRelation = LocalRelation(
+ AttributeReference("list", ArrayType(IntegerType))())
+}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
index d4916ea8d2..1877cff133 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
@@ -17,7 +17,8 @@
package org.apache.spark.sql.catalyst.optimizer
-import org.apache.spark.sql.catalyst.analysis.{AnalysisSuite, EliminateSubQueries}
+import org.apache.spark.sql.catalyst.SimpleCatalystConf
+import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -88,20 +89,24 @@ class BooleanSimplificationSuite extends PlanTest with PredicateHelper {
('a === 'b || 'b > 3 && 'a > 3 && 'a < 5))
}
- private def caseInsensitiveAnalyse(plan: LogicalPlan) =
- AnalysisSuite.caseInsensitiveAnalyzer.execute(plan)
+ private val caseInsensitiveAnalyzer =
+ new Analyzer(EmptyCatalog, EmptyFunctionRegistry, new SimpleCatalystConf(false))
test("(a && b) || (a && c) => a && (b || c) when case insensitive") {
- val plan = caseInsensitiveAnalyse(testRelation.where(('a > 2 && 'b > 3) || ('A > 2 && 'b < 5)))
+ val plan = caseInsensitiveAnalyzer.execute(
+ testRelation.where(('a > 2 && 'b > 3) || ('A > 2 && 'b < 5)))
val actual = Optimize.execute(plan)
- val expected = caseInsensitiveAnalyse(testRelation.where('a > 2 && ('b > 3 || 'b < 5)))
+ val expected = caseInsensitiveAnalyzer.execute(
+ testRelation.where('a > 2 && ('b > 3 || 'b < 5)))
comparePlans(actual, expected)
}
test("(a || b) && (a || c) => a || (b && c) when case insensitive") {
- val plan = caseInsensitiveAnalyse(testRelation.where(('a > 2 || 'b > 3) && ('A > 2 || 'b < 5)))
+ val plan = caseInsensitiveAnalyzer.execute(
+ testRelation.where(('a > 2 || 'b > 3) && ('A > 2 || 'b < 5)))
val actual = Optimize.execute(plan)
- val expected = caseInsensitiveAnalyse(testRelation.where('a > 2 || ('b > 3 && 'b < 5)))
+ val expected = caseInsensitiveAnalyzer.execute(
+ testRelation.where('a > 2 || ('b > 3 && 'b < 5)))
comparePlans(actual, expected)
}
}