about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala10
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala8
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/LogicalRelation.scala8
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala11
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala2
5 files changed, 28 insertions, 11 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala
index 87b8647655..9d64f35efc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala
@@ -65,10 +65,12 @@ case class LocalRelation(output: Seq[Attribute], data: Seq[InternalRow] = Nil)
}
}
- override def sameResult(plan: LogicalPlan): Boolean = plan match {
- case LocalRelation(otherOutput, otherData) =>
- otherOutput.map(_.dataType) == output.map(_.dataType) && otherData == data
- case _ => false
+ override def sameResult(plan: LogicalPlan): Boolean = {
+ plan.canonicalized match {
+ case LocalRelation(otherOutput, otherData) =>
+ otherOutput.map(_.dataType) == output.map(_.dataType) && otherData == data
+ case _ => false
+ }
}
override lazy val statistics =
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala
index ee72a70cce..e2c23a4ba8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala
@@ -87,9 +87,11 @@ private[sql] case class LogicalRDD(
override def newInstance(): LogicalRDD.this.type =
LogicalRDD(output.map(_.newInstance()), rdd)(session).asInstanceOf[this.type]
- override def sameResult(plan: LogicalPlan): Boolean = plan match {
- case LogicalRDD(_, otherRDD) => rdd.id == otherRDD.id
- case _ => false
+ override def sameResult(plan: LogicalPlan): Boolean = {
+ plan.canonicalized match {
+ case LogicalRDD(_, otherRDD) => rdd.id == otherRDD.id
+ case _ => false
+ }
}
override protected def stringArgs: Iterator[Any] = Iterator(output)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/LogicalRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/LogicalRelation.scala
index a418d02983..39c8606fd1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/LogicalRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/LogicalRelation.scala
@@ -60,9 +60,11 @@ case class LogicalRelation(
com.google.common.base.Objects.hashCode(relation, output)
}
- override def sameResult(otherPlan: LogicalPlan): Boolean = otherPlan match {
- case LogicalRelation(otherRelation, _, _) => relation == otherRelation
- case _ => false
+ override def sameResult(otherPlan: LogicalPlan): Boolean = {
+ otherPlan.canonicalized match {
+ case LogicalRelation(otherRelation, _, _) => relation == otherRelation
+ case _ => false
+ }
}
// When comparing two LogicalRelations from within LogicalPlan.sameResult, we only need
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index 44bafa55bc..3306ac42a3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -552,4 +552,15 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
selectStar,
Seq(Row(1, "1")))
}
+
+ test("SPARK-15915 Logical plans should use canonicalized plan when override sameResult") {
+ val localRelation = Seq(1, 2, 3).toDF()
+ localRelation.createOrReplaceTempView("localRelation")
+
+ spark.catalog.cacheTable("localRelation")
+ assert(
+ localRelation.queryExecution.withCachedData.collect {
+ case i: InMemoryRelation => i
+ }.size == 1)
+ }
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
index 5596a4470f..58bca2059c 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
@@ -185,7 +185,7 @@ private[hive] case class MetastoreRelation(
/** Only compare database and tablename, not alias. */
override def sameResult(plan: LogicalPlan): Boolean = {
- plan match {
+ plan.canonicalized match {
case mr: MetastoreRelation =>
mr.databaseName == databaseName && mr.tableName == tableName
case _ => false