diff options
author | Xin Ren <iamshrek@126.com> | 2015-12-08 11:44:51 -0600 |
---|---|---|
committer | Imran Rashid <irashid@cloudera.com> | 2015-12-08 11:46:46 -0600 |
commit | 6cb06e8711fd6ac10c57faeb94bc323cae1cef27 (patch) | |
tree | 2eb7479b50302d93e1532e26f7892f5c2974df7b /core/src/test/resources/HistoryServerExpectations/failed_stage_list_json_expectation.json | |
parent | e3735ce1602826f0a8e0ca9e08730923843449ee (diff) | |
download | spark-6cb06e8711fd6ac10c57faeb94bc323cae1cef27.tar.gz spark-6cb06e8711fd6ac10c57faeb94bc323cae1cef27.tar.bz2 spark-6cb06e8711fd6ac10c57faeb94bc323cae1cef27.zip |
[SPARK-11155][WEB UI] Stage summary json should include stage duration
The json endpoint for stages doesn't include information on the stage duration that is present in the UI. This looks like a simple oversight; they should be included. E.g., the metrics should be included at api/v1/applications/<appId>/stages.
Metrics I've added are: submissionTime, firstTaskLaunchedTime and completionTime
Author: Xin Ren <iamshrek@126.com>
Closes #10107 from keypointt/SPARK-11155.
Diffstat (limited to 'core/src/test/resources/HistoryServerExpectations/failed_stage_list_json_expectation.json')
-rw-r--r-- | core/src/test/resources/HistoryServerExpectations/failed_stage_list_json_expectation.json | 5 |
1 file changed, 4 insertions, 1 deletion
diff --git a/core/src/test/resources/HistoryServerExpectations/failed_stage_list_json_expectation.json b/core/src/test/resources/HistoryServerExpectations/failed_stage_list_json_expectation.json index bff6a4f69d..08b692eda8 100644 --- a/core/src/test/resources/HistoryServerExpectations/failed_stage_list_json_expectation.json +++ b/core/src/test/resources/HistoryServerExpectations/failed_stage_list_json_expectation.json @@ -6,6 +6,9 @@ "numCompleteTasks" : 7, "numFailedTasks" : 1, "executorRunTime" : 278, + "submissionTime" : "2015-02-03T16:43:06.296GMT", + "firstTaskLaunchedTime" : "2015-02-03T16:43:06.296GMT", + "completionTime" : "2015-02-03T16:43:06.347GMT", "inputBytes" : 0, "inputRecords" : 0, "outputBytes" : 0, @@ -20,4 +23,4 @@ "details" : "org.apache.spark.rdd.RDD.count(RDD.scala:910)\n$line11.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:20)\n$line11.$read$$iwC$$iwC$$iwC.<init>(<console>:25)\n$line11.$read$$iwC$$iwC.<init>(<console>:27)\n$line11.$read$$iwC.<init>(<console>:29)\n$line11.$read.<init>(<console>:31)\n$line11.$read$.<init>(<console>:35)\n$line11.$read$.<clinit>(<console>)\n$line11.$eval$.<init>(<console>:7)\n$line11.$eval$.<clinit>(<console>)\n$line11.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:606)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)\norg.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)\norg.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)", "schedulingPool" : "default", "accumulatorUpdates" : [ ] -} ]
\ No newline at end of file +} ] |