author    Reynold Xin <rxin@databricks.com>  2015-04-22 11:18:01 -0700
committer Reynold Xin <rxin@databricks.com>  2015-04-22 11:18:01 -0700
commit    cdf0328684f70ddcd49b23c23c1532aeb9caa44e (patch)
tree      7829448993c87df6aed3ab97ea88d6dc9cc814c2 /sql
parent    33b85620f910c404873d362d27cca1223084913a (diff)
[SQL] Rename some apply functions.
I was looking at the code gen code and got confused by a few use cases of apply, in particular apply on objects. So I went ahead and renamed a few of them. The call sites should be slightly clearer with a proper verb.

Author: Reynold Xin <rxin@databricks.com>

Closes #5624 from rxin/apply-rename and squashes the following commits:

ee45034 [Reynold Xin] [SQL] Rename some apply functions.
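For readers skimming the diff, a minimal sketch of the ambiguity the rename removes (the RuleSet and run names are illustrative, not from this patch): on a Scala object, apply lets a call site look exactly like a constructor invocation, so it does not say whether the call builds something or runs something.

object RuleSet {
  // Before: callers write `RuleSet(plan)`, which reads like construction
  // even though it executes the rules.
  def apply(plan: String): String = run(plan)

  // After: `RuleSet.run(plan)` names the action explicitly.
  def run(plan: String): String = plan.trim
}

RuleSet("SELECT 1 ")     // ambiguous at the call site: construct or execute?
RuleSet.run("SELECT 1 ") // unambiguous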
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala  6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala  6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala  2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala  9
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala  22
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala  6
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala  10
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala  8
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala  2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CombiningLimitsSuite.scala  4
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala  14
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConvertToLocalRelationSuite.scala  2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExpressionOptimizationSuite.scala  2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala  52
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LikeSimplificationSuite.scala  8
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala  4
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyCaseConversionExpressionsSuite.scala  8
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala  7
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala  6
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala  12
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala  10
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.scala  2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.scala  2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala  2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala  4
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala  2
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala  4
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala  4
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala  2
35 files changed, 117 insertions(+), 117 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
index 3823584287..1f3c02478b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
@@ -32,7 +32,7 @@ private[sql] object KeywordNormalizer {
private[sql] abstract class AbstractSparkSQLParser
extends StandardTokenParsers with PackratParsers {
- def apply(input: String): LogicalPlan = {
+ def parse(input: String): LogicalPlan = {
// Initialize the Keywords.
lexical.initialize(reservedWords)
phrase(start)(new lexical.Scanner(input)) match {
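A hedged sketch of how call sites change for this parser rename (the subclass name and query string are assumptions, not code from this patch):

// Any concrete AbstractSparkSQLParser subclass; MyDialectParser is hypothetical.
val parser = new MyDialectParser
// before: val plan: LogicalPlan = parser("SELECT a FROM t")   // implicit apply
val plan: LogicalPlan = parser.parse("SELECT a FROM t")        // explicit verb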
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
index 4e5c64bb63..5d5aba9644 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
@@ -296,7 +296,7 @@ package object dsl {
InsertIntoTable(
analysis.UnresolvedRelation(Seq(tableName)), Map.empty, logicalPlan, overwrite, false)
- def analyze: LogicalPlan = EliminateSubQueries(analysis.SimpleAnalyzer(logicalPlan))
+ def analyze: LogicalPlan = EliminateSubQueries(analysis.SimpleAnalyzer.execute(logicalPlan))
}
object plans { // scalastyle:ignore
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index be2c101d63..eeffedb558 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -98,11 +98,11 @@ abstract class CodeGenerator[InType <: AnyRef, OutType <: AnyRef] extends Loggin
})
/** Generates the requested evaluator binding the given expression(s) to the inputSchema. */
- def apply(expressions: InType, inputSchema: Seq[Attribute]): OutType =
- apply(bind(expressions, inputSchema))
+ def generate(expressions: InType, inputSchema: Seq[Attribute]): OutType =
+ generate(bind(expressions, inputSchema))
/** Generates the requested evaluator given already bound expression(s). */
- def apply(expressions: InType): OutType = cache.get(canonicalize(expressions))
+ def generate(expressions: InType): OutType = cache.get(canonicalize(expressions))
/**
* Returns a term name that is unique within this instance of a `CodeGenerator`.
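The two generate overloads keep the same relationship the apply pair had: the schema-taking variant binds attribute references first, then delegates to the single-argument, cache-backed variant. A hedged usage sketch (exprs, boundExprs, and child are illustrative names, not from this patch):

// Unbound expressions plus an input schema: bind, then generate.
val proj: Projection = GenerateProjection.generate(exprs, child.output)
// Already-bound expressions: canonicalize and hit the generated-code cache.
val proj2: Projection = GenerateProjection.generate(boundExprs)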
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
index a419fd7ecb..840260703a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
@@ -30,7 +30,7 @@ object GenerateMutableProjection extends CodeGenerator[Seq[Expression], () => Mu
val mutableRowName = newTermName("mutableRow")
protected def canonicalize(in: Seq[Expression]): Seq[Expression] =
- in.map(ExpressionCanonicalizer(_))
+ in.map(ExpressionCanonicalizer.execute)
protected def bind(in: Seq[Expression], inputSchema: Seq[Attribute]): Seq[Expression] =
in.map(BindReferences.bindReference(_, inputSchema))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
index fc2a2b6070..b129c0d898 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
@@ -30,7 +30,7 @@ object GenerateOrdering extends CodeGenerator[Seq[SortOrder], Ordering[Row]] wit
import scala.reflect.runtime.universe._
protected def canonicalize(in: Seq[SortOrder]): Seq[SortOrder] =
- in.map(ExpressionCanonicalizer(_).asInstanceOf[SortOrder])
+ in.map(ExpressionCanonicalizer.execute(_).asInstanceOf[SortOrder])
protected def bind(in: Seq[SortOrder], inputSchema: Seq[Attribute]): Seq[SortOrder] =
in.map(BindReferences.bindReference(_, inputSchema))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
index 2a0935c790..40e1630243 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
@@ -26,7 +26,7 @@ object GeneratePredicate extends CodeGenerator[Expression, (Row) => Boolean] {
import scala.reflect.runtime.{universe => ru}
import scala.reflect.runtime.universe._
- protected def canonicalize(in: Expression): Expression = ExpressionCanonicalizer(in)
+ protected def canonicalize(in: Expression): Expression = ExpressionCanonicalizer.execute(in)
protected def bind(in: Expression, inputSchema: Seq[Attribute]): Expression =
BindReferences.bindReference(in, inputSchema)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
index 6f572ff959..d491babc2b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
@@ -31,7 +31,7 @@ object GenerateProjection extends CodeGenerator[Seq[Expression], Projection] {
import scala.reflect.runtime.universe._
protected def canonicalize(in: Seq[Expression]): Seq[Expression] =
- in.map(ExpressionCanonicalizer(_))
+ in.map(ExpressionCanonicalizer.execute)
protected def bind(in: Seq[Expression], inputSchema: Seq[Attribute]): Seq[Expression] =
in.map(BindReferences.bindReference(_, inputSchema))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index fcd6352079..46522eb9c1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -23,10 +23,10 @@ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.types.{DataType, BinaryType, BooleanType, NativeType}
object InterpretedPredicate {
- def apply(expression: Expression, inputSchema: Seq[Attribute]): (Row => Boolean) =
- apply(BindReferences.bindReference(expression, inputSchema))
+ def create(expression: Expression, inputSchema: Seq[Attribute]): (Row => Boolean) =
+ create(BindReferences.bindReference(expression, inputSchema))
- def apply(expression: Expression): (Row => Boolean) = {
+ def create(expression: Expression): (Row => Boolean) = {
(r: Row) => expression.eval(r).asInstanceOf[Boolean]
}
}
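A hedged sketch of the renamed factory in use (relation and the comparison expression are assumptions):

// Take a resolved attribute from the relation, bind it against the schema,
// and get back a closure over Expression.eval.
val a = relation.output.head  // an AttributeReference
val keep: Row => Boolean =
  InterpretedPredicate.create(EqualTo(a, Literal(1)), relation.output)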
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
index c441f0bf24..3f9858b0c4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
@@ -45,7 +45,7 @@ abstract class RuleExecutor[TreeType <: TreeNode[_]] extends Logging {
* Executes the batches of rules defined by the subclass. The batches are executed serially
* using the defined execution strategy. Within each batch, rules are also executed serially.
*/
- def apply(plan: TreeType): TreeType = {
+ def execute(plan: TreeType): TreeType = {
var curPlan = plan
batches.foreach { batch =>
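Because the Analyzer, the Optimizer, and ExpressionCanonicalizer all extend RuleExecutor, this single rename accounts for most of the call-site churn in the suites below. A hedged before/after sketch (parsed is an illustrative input plan):

// before: val resolved = analyzer(parsed); val best = optimizer(resolved)
val resolved: LogicalPlan = analyzer.execute(parsed)
val best: LogicalPlan     = optimizer.execute(resolved)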
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
index 1a0a0e6154..a652c70560 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
@@ -49,13 +49,14 @@ class SqlParserSuite extends FunSuite {
test("test long keyword") {
val parser = new SuperLongKeywordTestParser
- assert(TestCommand("NotRealCommand") === parser("ThisIsASuperLongKeyWordTest NotRealCommand"))
+ assert(TestCommand("NotRealCommand") ===
+ parser.parse("ThisIsASuperLongKeyWordTest NotRealCommand"))
}
test("test case insensitive") {
val parser = new CaseInsensitiveTestParser
- assert(TestCommand("NotRealCommand") === parser("EXECUTE NotRealCommand"))
- assert(TestCommand("NotRealCommand") === parser("execute NotRealCommand"))
- assert(TestCommand("NotRealCommand") === parser("exEcute NotRealCommand"))
+ assert(TestCommand("NotRealCommand") === parser.parse("EXECUTE NotRealCommand"))
+ assert(TestCommand("NotRealCommand") === parser.parse("execute NotRealCommand"))
+ assert(TestCommand("NotRealCommand") === parser.parse("exEcute NotRealCommand"))
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index 7c249215bd..971e1ff5ec 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -42,10 +42,10 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
def caseSensitiveAnalyze(plan: LogicalPlan): Unit =
- caseSensitiveAnalyzer.checkAnalysis(caseSensitiveAnalyzer(plan))
+ caseSensitiveAnalyzer.checkAnalysis(caseSensitiveAnalyzer.execute(plan))
def caseInsensitiveAnalyze(plan: LogicalPlan): Unit =
- caseInsensitiveAnalyzer.checkAnalysis(caseInsensitiveAnalyzer(plan))
+ caseInsensitiveAnalyzer.checkAnalysis(caseInsensitiveAnalyzer.execute(plan))
val testRelation = LocalRelation(AttributeReference("a", IntegerType, nullable = true)())
val testRelation2 = LocalRelation(
@@ -82,7 +82,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
a.select(UnresolvedStar(None)).select('a).unionAll(b.select(UnresolvedStar(None)))
}
- assert(caseInsensitiveAnalyzer(plan).resolved)
+ assert(caseInsensitiveAnalyzer.execute(plan).resolved)
}
test("check project's resolved") {
@@ -98,11 +98,11 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
test("analyze project") {
assert(
- caseSensitiveAnalyzer(Project(Seq(UnresolvedAttribute("a")), testRelation)) ===
+ caseSensitiveAnalyzer.execute(Project(Seq(UnresolvedAttribute("a")), testRelation)) ===
Project(testRelation.output, testRelation))
assert(
- caseSensitiveAnalyzer(
+ caseSensitiveAnalyzer.execute(
Project(Seq(UnresolvedAttribute("TbL.a")),
UnresolvedRelation(Seq("TaBlE"), Some("TbL")))) ===
Project(testRelation.output, testRelation))
@@ -115,13 +115,13 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
assert(e.getMessage().toLowerCase.contains("cannot resolve"))
assert(
- caseInsensitiveAnalyzer(
+ caseInsensitiveAnalyzer.execute(
Project(Seq(UnresolvedAttribute("TbL.a")),
UnresolvedRelation(Seq("TaBlE"), Some("TbL")))) ===
Project(testRelation.output, testRelation))
assert(
- caseInsensitiveAnalyzer(
+ caseInsensitiveAnalyzer.execute(
Project(Seq(UnresolvedAttribute("tBl.a")),
UnresolvedRelation(Seq("TaBlE"), Some("TbL")))) ===
Project(testRelation.output, testRelation))
@@ -134,13 +134,13 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
assert(e.getMessage == "Table Not Found: tAbLe")
assert(
- caseSensitiveAnalyzer(UnresolvedRelation(Seq("TaBlE"), None)) === testRelation)
+ caseSensitiveAnalyzer.execute(UnresolvedRelation(Seq("TaBlE"), None)) === testRelation)
assert(
- caseInsensitiveAnalyzer(UnresolvedRelation(Seq("tAbLe"), None)) === testRelation)
+ caseInsensitiveAnalyzer.execute(UnresolvedRelation(Seq("tAbLe"), None)) === testRelation)
assert(
- caseInsensitiveAnalyzer(UnresolvedRelation(Seq("TaBlE"), None)) === testRelation)
+ caseInsensitiveAnalyzer.execute(UnresolvedRelation(Seq("TaBlE"), None)) === testRelation)
}
def errorTest(
@@ -219,7 +219,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
AttributeReference("d", DecimalType.Unlimited)(),
AttributeReference("e", ShortType)())
- val plan = caseInsensitiveAnalyzer(
+ val plan = caseInsensitiveAnalyzer.execute(
testRelation2.select(
'a / Literal(2) as 'div1,
'a / 'b as 'div2,
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
index 67bec999df..36b03d1c65 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
@@ -48,12 +48,12 @@ class DecimalPrecisionSuite extends FunSuite with BeforeAndAfter {
private def checkType(expression: Expression, expectedType: DataType): Unit = {
val plan = Project(Seq(Alias(expression, "c")()), relation)
- assert(analyzer(plan).schema.fields(0).dataType === expectedType)
+ assert(analyzer.execute(plan).schema.fields(0).dataType === expectedType)
}
private def checkComparison(expression: Expression, expectedType: DataType): Unit = {
val plan = Project(Alias(expression, "c")() :: Nil, relation)
- val comparison = analyzer(plan).collect {
+ val comparison = analyzer.execute(plan).collect {
case Project(Alias(e: BinaryComparison, _) :: Nil, _) => e
}.head
assert(comparison.left.dataType === expectedType)
@@ -64,7 +64,7 @@ class DecimalPrecisionSuite extends FunSuite with BeforeAndAfter {
val plan =
Union(Project(Seq(Alias(left, "l")()), relation),
Project(Seq(Alias(right, "r")()), relation))
- val (l, r) = analyzer(plan).collect {
+ val (l, r) = analyzer.execute(plan).collect {
case Union(left, right) => (left.output.head, right.output.head)
}.head
assert(l.dataType === expectedType)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala
index ef3114fd4d..b5ebe4b38e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala
@@ -29,7 +29,7 @@ class GeneratedEvaluationSuite extends ExpressionEvaluationSuite {
expected: Any,
inputRow: Row = EmptyRow): Unit = {
val plan = try {
- GenerateMutableProjection(Alias(expression, s"Optimized($expression)")() :: Nil)()
+ GenerateMutableProjection.generate(Alias(expression, s"Optimized($expression)")() :: Nil)()
} catch {
case e: Throwable =>
val evaluated = GenerateProjection.expressionEvaluator(expression)
@@ -56,10 +56,10 @@ class GeneratedEvaluationSuite extends ExpressionEvaluationSuite {
val futures = (1 to 20).map { _ =>
future {
- GeneratePredicate(EqualTo(Literal(1), Literal(1)))
- GenerateProjection(EqualTo(Literal(1), Literal(1)) :: Nil)
- GenerateMutableProjection(EqualTo(Literal(1), Literal(1)) :: Nil)
- GenerateOrdering(Add(Literal(1), Literal(1)).asc :: Nil)
+ GeneratePredicate.generate(EqualTo(Literal(1), Literal(1)))
+ GenerateProjection.generate(EqualTo(Literal(1), Literal(1)) :: Nil)
+ GenerateMutableProjection.generate(EqualTo(Literal(1), Literal(1)) :: Nil)
+ GenerateOrdering.generate(Add(Literal(1), Literal(1)).asc :: Nil)
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala
index bcc0c404d2..97af2e0fd0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala
@@ -25,13 +25,13 @@ import org.apache.spark.sql.catalyst.expressions.codegen._
*/
class GeneratedMutableEvaluationSuite extends ExpressionEvaluationSuite {
override def checkEvaluation(
- expression: Expression,
- expected: Any,
- inputRow: Row = EmptyRow): Unit = {
+ expression: Expression,
+ expected: Any,
+ inputRow: Row = EmptyRow): Unit = {
lazy val evaluated = GenerateProjection.expressionEvaluator(expression)
val plan = try {
- GenerateProjection(Alias(expression, s"Optimized($expression)")() :: Nil)
+ GenerateProjection.generate(Alias(expression, s"Optimized($expression)")() :: Nil)
} catch {
case e: Throwable =>
fail(
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
index 72f06e26e0..6255578d7f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
@@ -61,7 +61,7 @@ class BooleanSimplificationSuite extends PlanTest with PredicateHelper {
def checkCondition(input: Expression, expected: Expression): Unit = {
val plan = testRelation.where(input).analyze
- val actual = Optimize(plan).expressions.head
+ val actual = Optimize.execute(plan).expressions.head
compareConditions(actual, expected)
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CombiningLimitsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CombiningLimitsSuite.scala
index e2ae0d25db..2d16d668fd 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CombiningLimitsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CombiningLimitsSuite.scala
@@ -44,7 +44,7 @@ class CombiningLimitsSuite extends PlanTest {
.limit(10)
.limit(5)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.select('a)
@@ -61,7 +61,7 @@ class CombiningLimitsSuite extends PlanTest {
.limit(7)
.limit(5)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.select('a)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
index 4396bd0dda..14b28e8402 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
@@ -47,7 +47,7 @@ class ConstantFoldingSuite extends PlanTest {
.subquery('y)
.select('a)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.select('a.attr)
@@ -74,7 +74,7 @@ class ConstantFoldingSuite extends PlanTest {
Literal(2) * Literal(3) - Literal(6) / (Literal(4) - Literal(2))
)(Literal(9) / Literal(3) as Symbol("9/3"))
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
@@ -99,7 +99,7 @@ class ConstantFoldingSuite extends PlanTest {
Literal(2) * 'a + Literal(4) as Symbol("c3"),
'a * (Literal(3) + Literal(4)) as Symbol("c4"))
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
@@ -127,7 +127,7 @@ class ConstantFoldingSuite extends PlanTest {
(Literal(1) === Literal(1) || 'b > 1) &&
(Literal(1) === Literal(2) || 'b < 10)))
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
@@ -144,7 +144,7 @@ class ConstantFoldingSuite extends PlanTest {
Cast(Literal("2"), IntegerType) + Literal(3) + 'a as Symbol("c1"),
Coalesce(Seq(Cast(Literal("abc"), IntegerType), Literal(3))) as Symbol("c2"))
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
@@ -163,7 +163,7 @@ class ConstantFoldingSuite extends PlanTest {
Rand + Literal(1) as Symbol("c1"),
Sum('a) as Symbol("c2"))
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
@@ -210,7 +210,7 @@ class ConstantFoldingSuite extends PlanTest {
Contains("abc", Literal.create(null, StringType)) as 'c20
)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConvertToLocalRelationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConvertToLocalRelationSuite.scala
index cf42d43823..6841bd9890 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConvertToLocalRelationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConvertToLocalRelationSuite.scala
@@ -49,7 +49,7 @@ class ConvertToLocalRelationSuite extends PlanTest {
UnresolvedAttribute("a").as("a1"),
(UnresolvedAttribute("b") + 1).as("b1"))
- val optimized = Optimize(projectOnLocal.analyze)
+ val optimized = Optimize.execute(projectOnLocal.analyze)
comparePlans(optimized, correctAnswer)
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExpressionOptimizationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExpressionOptimizationSuite.scala
index 2f3704be59..a4a3a66b8b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExpressionOptimizationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExpressionOptimizationSuite.scala
@@ -30,7 +30,7 @@ class ExpressionOptimizationSuite extends ExpressionEvaluationSuite {
expected: Any,
inputRow: Row = EmptyRow): Unit = {
val plan = Project(Alias(expression, s"Optimized($expression)")() :: Nil, OneRowRelation)
- val optimizedPlan = DefaultOptimizer(plan)
+ val optimizedPlan = DefaultOptimizer.execute(plan)
super.checkEvaluation(optimizedPlan.expressions.head, expected, inputRow)
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
index 45cf695d20..aa9708b164 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
@@ -50,7 +50,7 @@ class FilterPushdownSuite extends PlanTest {
.subquery('y)
.select('a)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.select('a.attr)
@@ -65,7 +65,7 @@ class FilterPushdownSuite extends PlanTest {
.groupBy('a)('a, Count('b))
.select('a)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.select('a)
@@ -81,7 +81,7 @@ class FilterPushdownSuite extends PlanTest {
.groupBy('a)('a as 'c, Count('b))
.select('c)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.select('a)
@@ -98,7 +98,7 @@ class FilterPushdownSuite extends PlanTest {
.select('a)
.where('a === 1)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.where('a === 1)
@@ -115,7 +115,7 @@ class FilterPushdownSuite extends PlanTest {
.where('e === 1)
.analyze
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.where('a + 'b === 1)
@@ -131,7 +131,7 @@ class FilterPushdownSuite extends PlanTest {
.where('a === 1)
.where('a === 2)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.where('a === 1 && 'a === 2)
@@ -152,7 +152,7 @@ class FilterPushdownSuite extends PlanTest {
.where("y.b".attr === 2)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 1)
val right = testRelation.where('b === 2)
val correctAnswer =
@@ -170,7 +170,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 1)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 1)
val right = testRelation
val correctAnswer =
@@ -188,7 +188,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 1 && "y.b".attr === 2)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 1)
val right = testRelation.where('b === 2)
val correctAnswer =
@@ -206,7 +206,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 1 && "y.b".attr === 2)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 1)
val correctAnswer =
left.join(y, LeftOuter).where("y.b".attr === 2).analyze
@@ -223,7 +223,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 1 && "y.b".attr === 2)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val right = testRelation.where('b === 2).subquery('d)
val correctAnswer =
x.join(right, RightOuter).where("x.b".attr === 1).analyze
@@ -240,7 +240,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 2 && "y.b".attr === 2)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 2).subquery('d)
val correctAnswer =
left.join(y, LeftOuter, Some("d.b".attr === 1)).where("y.b".attr === 2).analyze
@@ -257,7 +257,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 2 && "y.b".attr === 2)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val right = testRelation.where('b === 2).subquery('d)
val correctAnswer =
x.join(right, RightOuter, Some("d.b".attr === 1)).where("x.b".attr === 2).analyze
@@ -274,7 +274,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 2 && "y.b".attr === 2)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 2).subquery('l)
val right = testRelation.where('b === 1).subquery('r)
val correctAnswer =
@@ -292,7 +292,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 2 && "y.b".attr === 2)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val right = testRelation.where('b === 2).subquery('r)
val correctAnswer =
x.join(right, RightOuter, Some("r.b".attr === 1)).where("x.b".attr === 2).analyze
@@ -309,7 +309,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === "y.c".attr)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 2).subquery('l)
val right = testRelation.where('b === 1).subquery('r)
val correctAnswer =
@@ -327,7 +327,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === "y.c".attr)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.subquery('l)
val right = testRelation.where('b === 2).subquery('r)
val correctAnswer =
@@ -346,7 +346,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === "y.c".attr)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 2).subquery('l)
val right = testRelation.where('b === 1).subquery('r)
val correctAnswer =
@@ -365,7 +365,7 @@ class FilterPushdownSuite extends PlanTest {
.where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === "y.c".attr)
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('a === 3).subquery('l)
val right = testRelation.where('b === 2).subquery('r)
val correctAnswer =
@@ -382,7 +382,7 @@ class FilterPushdownSuite extends PlanTest {
val originalQuery = {
x.join(y, condition = Some("x.b".attr === "y.b".attr))
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
comparePlans(analysis.EliminateSubQueries(originalQuery.analyze), optimized)
}
@@ -396,7 +396,7 @@ class FilterPushdownSuite extends PlanTest {
.where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) && ("y.a".attr === 1))
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('a === 1).subquery('x)
val right = testRelation.where('a === 1).subquery('y)
val correctAnswer =
@@ -415,7 +415,7 @@ class FilterPushdownSuite extends PlanTest {
.where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1))
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('a === 1).subquery('x)
val right = testRelation.subquery('y)
val correctAnswer =
@@ -436,7 +436,7 @@ class FilterPushdownSuite extends PlanTest {
("z.a".attr >= 3) && ("z.a".attr === "x.b".attr))
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val lleft = testRelation.where('a >= 3).subquery('z)
val left = testRelation.where('a === 1).subquery('x)
val right = testRelation.subquery('y)
@@ -457,7 +457,7 @@ class FilterPushdownSuite extends PlanTest {
.generate(Explode('c_arr), true, false, Some("arr"))
.where(('b >= 5) && ('a > 6))
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = {
testRelationWithArrayType
.where(('b >= 5) && ('a > 6))
@@ -474,7 +474,7 @@ class FilterPushdownSuite extends PlanTest {
.generate(generator, true, false, Some("arr"))
.where(('b >= 5) && ('c > 6))
}
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val referenceResult = {
testRelationWithArrayType
.where('b >= 5)
@@ -502,7 +502,7 @@ class FilterPushdownSuite extends PlanTest {
.generate(Explode('c_arr), true, false, Some("arr"))
.where(('c > 6) || ('b > 5)).analyze
}
- val optimized = Optimize(originalQuery)
+ val optimized = Optimize.execute(originalQuery)
comparePlans(optimized, originalQuery)
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LikeSimplificationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LikeSimplificationSuite.scala
index b10577c800..b3df487c84 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LikeSimplificationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LikeSimplificationSuite.scala
@@ -41,7 +41,7 @@ class LikeSimplificationSuite extends PlanTest {
testRelation
.where(('a like "abc%") || ('a like "abc\\%"))
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.where(StartsWith('a, "abc") || ('a like "abc\\%"))
.analyze
@@ -54,7 +54,7 @@ class LikeSimplificationSuite extends PlanTest {
testRelation
.where('a like "%xyz")
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.where(EndsWith('a, "xyz"))
.analyze
@@ -67,7 +67,7 @@ class LikeSimplificationSuite extends PlanTest {
testRelation
.where(('a like "%mn%") || ('a like "%mn\\%"))
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.where(Contains('a, "mn") || ('a like "%mn\\%"))
.analyze
@@ -80,7 +80,7 @@ class LikeSimplificationSuite extends PlanTest {
testRelation
.where(('a like "") || ('a like "abc"))
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.where(('a === "") || ('a === "abc"))
.analyze
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
index 966bc9ada1..3eb399e68e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
@@ -49,7 +49,7 @@ class OptimizeInSuite extends PlanTest {
.where(In(UnresolvedAttribute("a"), Seq(Literal(1),Literal(2))))
.analyze
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.where(InSet(UnresolvedAttribute("a"), HashSet[Any]() + 1 + 2))
@@ -64,7 +64,7 @@ class OptimizeInSuite extends PlanTest {
.where(In(UnresolvedAttribute("a"), Seq(Literal(1),Literal(2), UnresolvedAttribute("b"))))
.analyze
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.where(In(UnresolvedAttribute("a"), Seq(Literal(1),Literal(2), UnresolvedAttribute("b"))))
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyCaseConversionExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyCaseConversionExpressionsSuite.scala
index 22992fb6f5..6b1e53cd42 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyCaseConversionExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyCaseConversionExpressionsSuite.scala
@@ -41,7 +41,7 @@ class SimplifyCaseConversionExpressionsSuite extends PlanTest {
testRelation
.select(Upper(Upper('a)) as 'u)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.select(Upper('a) as 'u)
@@ -55,7 +55,7 @@ class SimplifyCaseConversionExpressionsSuite extends PlanTest {
testRelation
.select(Upper(Lower('a)) as 'u)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.select(Upper('a) as 'u)
@@ -69,7 +69,7 @@ class SimplifyCaseConversionExpressionsSuite extends PlanTest {
testRelation
.select(Lower(Upper('a)) as 'l)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.select(Lower('a) as 'l)
.analyze
@@ -82,7 +82,7 @@ class SimplifyCaseConversionExpressionsSuite extends PlanTest {
testRelation
.select(Lower(Lower('a)) as 'l)
- val optimized = Optimize(originalQuery.analyze)
+ val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.select(Lower('a) as 'l)
.analyze
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala
index a54751dfa9..a3ad200800 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala
@@ -17,10 +17,9 @@
package org.apache.spark.sql.catalyst.optimizer
-import org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
+import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
-import org.apache.spark.sql.catalyst.plans.{PlanTest, LeftOuter, RightOuter}
import org.apache.spark.sql.catalyst.rules._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.dsl.expressions._
@@ -41,7 +40,7 @@ class UnionPushdownSuite extends PlanTest {
test("union: filter to each side") {
val query = testUnion.where('a === 1)
- val optimized = Optimize(query.analyze)
+ val optimized = Optimize.execute(query.analyze)
val correctAnswer =
Union(testRelation.where('a === 1), testRelation2.where('d === 1)).analyze
@@ -52,7 +51,7 @@ class UnionPushdownSuite extends PlanTest {
test("union: project to each side") {
val query = testUnion.select('b)
- val optimized = Optimize(query.analyze)
+ val optimized = Optimize.execute(query.analyze)
val correctAnswer =
Union(testRelation.select('b), testRelation2.select('e)).analyze
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
index 4b2d455840..2a641c63f8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
@@ -34,7 +34,7 @@ class RuleExecutorSuite extends FunSuite {
val batches = Batch("once", Once, DecrementLiterals) :: Nil
}
- assert(ApplyOnce(Literal(10)) === Literal(9))
+ assert(ApplyOnce.execute(Literal(10)) === Literal(9))
}
test("to fixed point") {
@@ -42,7 +42,7 @@ class RuleExecutorSuite extends FunSuite {
val batches = Batch("fixedPoint", FixedPoint(100), DecrementLiterals) :: Nil
}
- assert(ToFixedPoint(Literal(10)) === Literal(0))
+ assert(ToFixedPoint.execute(Literal(10)) === Literal(0))
}
test("to maxIterations") {
@@ -50,6 +50,6 @@ class RuleExecutorSuite extends FunSuite {
val batches = Batch("fixedPoint", FixedPoint(10), DecrementLiterals) :: Nil
}
- assert(ToFixedPoint(Literal(100)) === Literal(90))
+ assert(ToFixedPoint.execute(Literal(100)) === Literal(90))
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index bcd20c06c6..a279b0f07c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -132,16 +132,16 @@ class SQLContext(@transient val sparkContext: SparkContext)
protected[sql] lazy val optimizer: Optimizer = DefaultOptimizer
@transient
- protected[sql] val ddlParser = new DDLParser(sqlParser.apply(_))
+ protected[sql] val ddlParser = new DDLParser(sqlParser.parse(_))
@transient
protected[sql] val sqlParser = {
val fallback = new catalyst.SqlParser
- new SparkSQLParser(fallback(_))
+ new SparkSQLParser(fallback.parse(_))
}
protected[sql] def parseSql(sql: String): LogicalPlan = {
- ddlParser(sql, false).getOrElse(sqlParser(sql))
+ ddlParser.parse(sql, false).getOrElse(sqlParser.parse(sql))
}
protected[sql] def executeSql(sql: String): this.QueryExecution = executePlan(parseSql(sql))
@@ -1120,12 +1120,12 @@ class SQLContext(@transient val sparkContext: SparkContext)
protected[sql] class QueryExecution(val logical: LogicalPlan) {
def assertAnalyzed(): Unit = analyzer.checkAnalysis(analyzed)
- lazy val analyzed: LogicalPlan = analyzer(logical)
+ lazy val analyzed: LogicalPlan = analyzer.execute(logical)
lazy val withCachedData: LogicalPlan = {
assertAnalyzed()
cacheManager.useCachedData(analyzed)
}
- lazy val optimizedPlan: LogicalPlan = optimizer(withCachedData)
+ lazy val optimizedPlan: LogicalPlan = optimizer.execute(withCachedData)
// TODO: Don't just pick the first one...
lazy val sparkPlan: SparkPlan = {
@@ -1134,7 +1134,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
}
// executedPlan should not be used to initialize any SparkPlan. It should be
// only used for execution.
- lazy val executedPlan: SparkPlan = prepareForExecution(sparkPlan)
+ lazy val executedPlan: SparkPlan = prepareForExecution.execute(sparkPlan)
/** Internal version of the RDD. Avoids copies and has no schema */
lazy val toRdd: RDD[Row] = executedPlan.execute()
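A condensed, hedged restatement of the QueryExecution stages after the rename (withCachedData and error handling elided), with each phase now an explicit verb on its RuleExecutor:

lazy val analyzed: LogicalPlan      = analyzer.execute(logical)
lazy val optimizedPlan: LogicalPlan = optimizer.execute(analyzed)
lazy val executedPlan: SparkPlan    = prepareForExecution.execute(sparkPlan)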
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
index e159ffe66c..59c89800da 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
@@ -144,7 +144,7 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializ
log.debug(
s"Creating Projection: $expressions, inputSchema: $inputSchema, codegen:$codegenEnabled")
if (codegenEnabled) {
- GenerateProjection(expressions, inputSchema)
+ GenerateProjection.generate(expressions, inputSchema)
} else {
new InterpretedProjection(expressions, inputSchema)
}
@@ -156,7 +156,7 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializ
log.debug(
s"Creating MutableProj: $expressions, inputSchema: $inputSchema, codegen:$codegenEnabled")
if(codegenEnabled) {
- GenerateMutableProjection(expressions, inputSchema)
+ GenerateMutableProjection.generate(expressions, inputSchema)
} else {
() => new InterpretedMutableProjection(expressions, inputSchema)
}
@@ -166,15 +166,15 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializ
protected def newPredicate(
expression: Expression, inputSchema: Seq[Attribute]): (Row) => Boolean = {
if (codegenEnabled) {
- GeneratePredicate(expression, inputSchema)
+ GeneratePredicate.generate(expression, inputSchema)
} else {
- InterpretedPredicate(expression, inputSchema)
+ InterpretedPredicate.create(expression, inputSchema)
}
}
protected def newOrdering(order: Seq[SortOrder], inputSchema: Seq[Attribute]): Ordering[Row] = {
if (codegenEnabled) {
- GenerateOrdering(order, inputSchema)
+ GenerateOrdering.generate(order, inputSchema)
} else {
new RowOrdering(order, inputSchema)
}
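Both branches of newPredicate still return the same Row => Boolean shape, so callers are untouched by the rename. An illustrative caller (the filter-operator wiring below is an assumption, not part of this diff):

val keep: Row => Boolean = newPredicate(condition, child.output)
// Apply the compiled-or-interpreted predicate per partition.
child.execute().mapPartitions(iter => iter.filter(keep))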
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.scala
index 83b1a83765..56200f6b8c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.scala
@@ -59,7 +59,7 @@ case class BroadcastNestedLoopJoin(
}
@transient private lazy val boundCondition =
- InterpretedPredicate(
+ InterpretedPredicate.create(
condition
.map(c => BindReferences.bindReference(c, left.output ++ right.output))
.getOrElse(Literal(true)))
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.scala
index 1fa7e7bd04..e06f63f94b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.scala
@@ -45,7 +45,7 @@ case class LeftSemiJoinBNL(
override def right: SparkPlan = broadcast
@transient private lazy val boundCondition =
- InterpretedPredicate(
+ InterpretedPredicate.create(
condition
.map(c => BindReferences.bindReference(c, left.output ++ right.output))
.getOrElse(Literal(true)))
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala b/sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala
index af7b3c81ae..88466f52bd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala
@@ -611,7 +611,7 @@ private[sql] case class ParquetRelation2(
val rawPredicate =
partitionPruningPredicates.reduceOption(expressions.And).getOrElse(Literal(true))
- val boundPredicate = InterpretedPredicate(rawPredicate transform {
+ val boundPredicate = InterpretedPredicate.create(rawPredicate transform {
case a: AttributeReference =>
val index = partitionColumns.indexWhere(a.name == _.name)
BoundReference(index, partitionColumns(index).dataType, nullable = true)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
index 78d494184e..e7a0685e01 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
@@ -38,9 +38,9 @@ private[sql] class DDLParser(
parseQuery: String => LogicalPlan)
extends AbstractSparkSQLParser with DataTypeParser with Logging {
- def apply(input: String, exceptionOnError: Boolean): Option[LogicalPlan] = {
+ def parse(input: String, exceptionOnError: Boolean): Option[LogicalPlan] = {
try {
- Some(apply(input))
+ Some(parse(input))
} catch {
case ddlException: DDLException => throw ddlException
case _ if !exceptionOnError => None
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index c4a73b3004..dd06b2620c 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -93,7 +93,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
if (conf.dialect == "sql") {
super.sql(substituted)
} else if (conf.dialect == "hiveql") {
- val ddlPlan = ddlParserWithHiveQL(sqlText, exceptionOnError = false)
+ val ddlPlan = ddlParserWithHiveQL.parse(sqlText, exceptionOnError = false)
DataFrame(this, ddlPlan.getOrElse(HiveQl.parseSql(substituted)))
} else {
sys.error(s"Unsupported SQL dialect: ${conf.dialect}. Try 'sql' or 'hiveql'")
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index 85061f2277..0ea6d57b81 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -144,7 +144,7 @@ private[hive] object HiveQl {
protected val hqlParser = {
val fallback = new ExtendedHiveQlParser
- new SparkSQLParser(fallback(_))
+ new SparkSQLParser(fallback.parse(_))
}
/**
@@ -240,7 +240,7 @@ private[hive] object HiveQl {
/** Returns a LogicalPlan for a given HiveQL string. */
- def parseSql(sql: String): LogicalPlan = hqlParser(sql)
+ def parseSql(sql: String): LogicalPlan = hqlParser.parse(sql)
val errorRegEx = "line (\\d+):(\\d+) (.*)".r
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
index a6f4fbe8ab..be9249a8b1 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
@@ -119,9 +119,9 @@ private[hive] trait HiveStrategies {
val inputData = new GenericMutableRow(relation.partitionKeys.size)
val pruningCondition =
if (codegenEnabled) {
- GeneratePredicate(castedPredicate)
+ GeneratePredicate.generate(castedPredicate)
} else {
- InterpretedPredicate(castedPredicate)
+ InterpretedPredicate.create(castedPredicate)
}
val partitions = relation.hiveQlPartitions.filter { part =>
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
index 6570fa1043..9f17bca083 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -185,7 +185,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
logDebug(s"Query references test tables: ${referencedTestTables.mkString(", ")}")
referencedTestTables.foreach(loadTestTable)
// Proceed with analysis.
- analyzer(logical)
+ analyzer.execute(logical)
}
}