author     Liang-Chi Hsieh <viirya@gmail.com>    2016-12-01 01:57:58 -0800
committer  Reynold Xin <rxin@databricks.com>     2016-12-01 01:57:58 -0800
commit     dbf842b7a8479f9566146192ffc04421591742d5 (patch)
tree       ab940f255d9161e22c3d4e174317a8b54d7b19df /core
parent     88f559f20a5208f2386b874eb119f1cba2c748c7 (diff)
[SPARK-18666][WEB UI] Remove the codes checking deprecated config spark.sql.unsafe.enabled
## What changes were proposed in this pull request?

`spark.sql.unsafe.enabled` has been deprecated since 1.6, but the web UI still contains code that checks it. This change removes those checks and cleans up the surrounding code.

## How was this patch tested?

Updated the related existing unit tests.

Please review http://spark.apache.org/contributing.html before opening a pull request.

Author: Liang-Chi Hsieh <viirya@gmail.com>

Closes #16095 from viirya/remove-deprecated-config-code.
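For context, the pattern this patch removes reads the deprecated flag from `SparkConf` and gates the peak-execution-memory UI elements on it. A minimal sketch of the before/after, using only calls that appear in the diff below (`conf` stands in for the page's `SparkConf`):

```scala
import org.apache.spark.SparkConf

val conf = new SparkConf(false)

// Before: the column was rendered only when this deprecated flag
// (default true) was set, in both StagePage and TaskPagedTable.
val displayPeakExecutionMemory = conf.getBoolean("spark.sql.unsafe.enabled", true)

// After: the flag lookup and every `if (displayPeakExecutionMemory)`
// branch are gone, and the column is rendered unconditionally.
```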
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala    49
-rw-r--r--  core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala    16
2 files changed, 18 insertions(+), 47 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
index 8c7cefe200..412ddfa9fa 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
@@ -70,8 +70,6 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
// if we find that it's okay.
private val MAX_TIMELINE_TASKS = parent.conf.getInt("spark.ui.timeline.tasks.maximum", 1000)
- private val displayPeakExecutionMemory = parent.conf.getBoolean("spark.sql.unsafe.enabled", true)
-
private def getLocalitySummaryString(stageData: StageUIData): String = {
val localities = stageData.taskData.values.map(_.taskInfo.taskLocality)
val localityCounts = localities.groupBy(identity).mapValues(_.size)
@@ -252,15 +250,13 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
<span class="additional-metric-title">Getting Result Time</span>
</span>
</li>
- {if (displayPeakExecutionMemory) {
- <li>
- <span data-toggle="tooltip"
- title={ToolTips.PEAK_EXECUTION_MEMORY} data-placement="right">
- <input type="checkbox" name={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}/>
- <span class="additional-metric-title">Peak Execution Memory</span>
- </span>
- </li>
- }}
+ <li>
+ <span data-toggle="tooltip"
+ title={ToolTips.PEAK_EXECUTION_MEMORY} data-placement="right">
+ <input type="checkbox" name={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}/>
+ <span class="additional-metric-title">Peak Execution Memory</span>
+ </span>
+ </li>
</ul>
</div>
</div>
@@ -532,13 +528,9 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
{serializationQuantiles}
</tr>,
<tr class={TaskDetailsClassNames.GETTING_RESULT_TIME}>{gettingResultQuantiles}</tr>,
- if (displayPeakExecutionMemory) {
- <tr class={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}>
- {peakExecutionMemoryQuantiles}
- </tr>
- } else {
- Nil
- },
+ <tr class={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}>
+ {peakExecutionMemoryQuantiles}
+ </tr>,
if (stageData.hasInput) <tr>{inputQuantiles}</tr> else Nil,
if (stageData.hasOutput) <tr>{outputQuantiles}</tr> else Nil,
if (stageData.hasShuffleRead) {
@@ -1166,9 +1158,6 @@ private[ui] class TaskPagedTable(
desc: Boolean,
executorsListener: ExecutorsListener) extends PagedTable[TaskTableRowData] {
- // We only track peak memory used for unsafe operators
- private val displayPeakExecutionMemory = conf.getBoolean("spark.sql.unsafe.enabled", true)
-
override def tableId: String = "task-table"
override def tableCssClass: String =
@@ -1217,14 +1206,8 @@ private[ui] class TaskPagedTable(
("Task Deserialization Time", TaskDetailsClassNames.TASK_DESERIALIZATION_TIME),
("GC Time", ""),
("Result Serialization Time", TaskDetailsClassNames.RESULT_SERIALIZATION_TIME),
- ("Getting Result Time", TaskDetailsClassNames.GETTING_RESULT_TIME)) ++
- {
- if (displayPeakExecutionMemory) {
- Seq(("Peak Execution Memory", TaskDetailsClassNames.PEAK_EXECUTION_MEMORY))
- } else {
- Nil
- }
- } ++
+ ("Getting Result Time", TaskDetailsClassNames.GETTING_RESULT_TIME),
+ ("Peak Execution Memory", TaskDetailsClassNames.PEAK_EXECUTION_MEMORY)) ++
{if (hasAccumulators) Seq(("Accumulators", "")) else Nil} ++
{if (hasInput) Seq(("Input Size / Records", "")) else Nil} ++
{if (hasOutput) Seq(("Output Size / Records", "")) else Nil} ++
@@ -1316,11 +1299,9 @@ private[ui] class TaskPagedTable(
<td class={TaskDetailsClassNames.GETTING_RESULT_TIME}>
{UIUtils.formatDuration(task.gettingResultTime)}
</td>
- {if (displayPeakExecutionMemory) {
- <td class={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}>
- {Utils.bytesToString(task.peakExecutionMemoryUsed)}
- </td>
- }}
+ <td class={TaskDetailsClassNames.PEAK_EXECUTION_MEMORY}>
+ {Utils.bytesToString(task.peakExecutionMemoryUsed)}
+ </td>
{if (task.accumulators.nonEmpty) {
<td>{Unparsed(task.accumulators.get)}</td>
}}
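The TaskPagedTable hunks follow the same shape: a conditionally appended header column becomes unconditional. A simplified, self-contained model of that change (the names here are illustrative, not the real UI types):

```scala
// Before the patch: the column was appended only when the deprecated
// flag was set.
def headersBefore(displayPeakExecutionMemory: Boolean): Seq[String] =
  Seq("Getting Result Time") ++
    (if (displayPeakExecutionMemory) Seq("Peak Execution Memory") else Nil)

// After the patch: the column is always present.
val headersAfter: Seq[String] =
  Seq("Getting Result Time", "Peak Execution Memory")
```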
diff --git a/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala b/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
index d30b987d6c..11482d187a 100644
--- a/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
@@ -35,25 +35,15 @@ class StagePageSuite extends SparkFunSuite with LocalSparkContext {
private val peakExecutionMemory = 10
- test("peak execution memory only displayed if unsafe is enabled") {
- val unsafeConf = "spark.sql.unsafe.enabled"
- val conf = new SparkConf(false).set(unsafeConf, "true")
+ test("peak execution memory should displayed") {
+ val conf = new SparkConf(false)
val html = renderStagePage(conf).toString().toLowerCase
val targetString = "peak execution memory"
assert(html.contains(targetString))
- // Disable unsafe and make sure it's not there
- val conf2 = new SparkConf(false).set(unsafeConf, "false")
- val html2 = renderStagePage(conf2).toString().toLowerCase
- assert(!html2.contains(targetString))
- // Avoid setting anything; it should be displayed by default
- val conf3 = new SparkConf(false)
- val html3 = renderStagePage(conf3).toString().toLowerCase
- assert(html3.contains(targetString))
}
test("SPARK-10543: peak execution memory should be per-task rather than cumulative") {
- val unsafeConf = "spark.sql.unsafe.enabled"
- val conf = new SparkConf(false).set(unsafeConf, "true")
+ val conf = new SparkConf(false)
val html = renderStagePage(conf).toString().toLowerCase
// verify min/25/50/75/max show task value not cumulative values
assert(html.contains(s"<td>$peakExecutionMemory.0 b</td>" * 5))
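Assembled from the `+` lines above, the first test after this patch reads straight through as follows (`renderStagePage` is the suite's existing private helper; the test name's wording is as committed):

```scala
// The metric is now always rendered, so no configuration is needed.
test("peak execution memory should displayed") {
  val conf = new SparkConf(false)
  val html = renderStagePage(conf).toString().toLowerCase
  val targetString = "peak execution memory"
  assert(html.contains(targetString))
}
```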