Diffstat (limited to 'sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala | 18
1 file changed, 2 insertions(+), 16 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
index 695b1824e8..1859c6e7ad 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
@@ -255,28 +255,14 @@ class SQLMetricsSuite extends SparkFunSuite with SharedSQLContext {
val df1 = Seq((1, "1"), (2, "2")).toDF("key", "value")
val df2 = Seq((1, "1"), (2, "2"), (3, "3"), (4, "4")).toDF("key2", "value")
// Assume the execution plan is
- // ... -> BroadcastLeftSemiJoinHash(nodeId = 0)
+ // ... -> BroadcastHashJoin(nodeId = 0)
val df = df1.join(broadcast(df2), $"key" === $"key2", "leftsemi")
testSparkPlanMetrics(df, 2, Map(
- 0L -> ("BroadcastLeftSemiJoinHash", Map(
+ 0L -> ("BroadcastHashJoin", Map(
"number of output rows" -> 2L)))
)
}
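
For reference, this is how the test body reads once the hunk above is applied. A minimal sketch, assuming the testSparkPlanMetrics helper and the implicits from the surrounding SQLMetricsSuite; the enclosing test name is not visible in this hunk, so the one used here is hypothetical:

  test("BroadcastHashJoin (left semi) metrics") {  // hypothetical name, not shown in this hunk
    val df1 = Seq((1, "1"), (2, "2")).toDF("key", "value")
    val df2 = Seq((1, "1"), (2, "2"), (3, "3"), (4, "4")).toDF("key2", "value")
    // Assume the execution plan is
    // ... -> BroadcastHashJoin(nodeId = 0)
    val df = df1.join(broadcast(df2), $"key" === $"key2", "leftsemi")
    testSparkPlanMetrics(df, 2, Map(
      0L -> ("BroadcastHashJoin", Map(
        "number of output rows" -> 2L)))
    )
  }
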
- test("ShuffledHashJoin metrics") {
- withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "0") {
- val df1 = Seq((1, "1"), (2, "2")).toDF("key", "value")
- val df2 = Seq((1, "1"), (2, "2"), (3, "3"), (4, "4")).toDF("key2", "value")
- // Assume the execution plan is
- // ... -> ShuffledHashJoin(nodeId = 0)
- val df = df1.join(df2, $"key" === $"key2", "leftsemi")
- testSparkPlanMetrics(df, 1, Map(
- 0L -> ("ShuffledHashJoin", Map(
- "number of output rows" -> 2L)))
- )
- }
- }
-
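
Since the ShuffledHashJoin metrics test is deleted outright rather than updated, one way to check which operator a non-broadcast left-semi join now plans to is to inspect the executed plan directly. A minimal sketch, assuming the same withSQLConf helper and shared-context setup used above; the test name and the plan printout are illustrative, not part of this patch, and the operator chosen depends on the Spark version:

  test("left semi join operator without broadcast") {  // hypothetical test, not in this patch
    withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "0") {
      val df1 = Seq((1, "1"), (2, "2")).toDF("key", "value")
      val df2 = Seq((1, "1"), (2, "2"), (3, "3"), (4, "4")).toDF("key2", "value")
      val df = df1.join(df2, $"key" === $"key2", "leftsemi")
      // Print each physical operator to see what the planner picked,
      // now that ShuffledHashJoin is no longer produced for this shape.
      df.queryExecution.executedPlan.foreach(node => println(node.nodeName))
      // A left-semi join keeps only the left side's columns; keys 1 and 2 both match.
      assert(df.collect().map(_.getInt(0)).sorted === Array(1, 2))
    }
  }
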
test("CartesianProduct metrics") {
val testDataForJoin = testData2.filter('a < 2) // TestData2(1, 1) :: TestData2(1, 2)
testDataForJoin.registerTempTable("testDataForJoin")