| author | Ala Luszczak <ala@databricks.com> | 2017-02-10 21:10:02 +0100 |
|---|---|---|
| committer | Reynold Xin <rxin@databricks.com> | 2017-02-10 21:10:02 +0100 |
| commit | d785217b791882e075ad537852d49d78fc1ca31b (patch) | |
| tree | fbfb20bdc0098918327080e7dca2d88feedd6d7f /core/src/main | |
| parent | 3a43ae7c0bbce8eda98f50a97a0138f860197a98 (diff) | |
[SPARK-19549] Allow providing reason for stage/job cancelling
## What changes were proposed in this pull request?
This change adds an optional argument to the `SparkContext.cancelStage()` and `SparkContext.cancelJob()` methods, allowing the caller to provide the exact reason for the cancellation.
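Not part of the patch itself, but for illustration, a minimal sketch of how a caller might use the new overloads. It relies only on the standard `SparkStatusTracker` lookup; the local master, sleeps, and the reason string are invented for the example:

```scala
import org.apache.spark.{SparkConf, SparkContext}

object CancelWithReasonExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("cancel-with-reason").setMaster("local[2]"))

    // Run a deliberately slow job on a background thread so there is something to cancel.
    val worker = new Thread(new Runnable {
      override def run(): Unit = {
        try {
          sc.parallelize(1 to 100000, 8).map { i => Thread.sleep(1); i }.count()
        } catch {
          // The cancellation reason should appear in the exception message.
          case e: Exception => println(s"Job failed: ${e.getMessage}")
        }
      }
    })
    worker.start()
    Thread.sleep(2000) // crude wait for the job to start; illustration only

    // New overloads from this patch: pass an explicit cancellation reason.
    sc.statusTracker.getActiveJobIds().foreach(id => sc.cancelJob(id, "input no longer needed"))
    // sc.cancelStage(stageId, "stage ran too long")  // the stage-level variant is analogous

    worker.join()
    sc.stop()
  }
}
```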
## How was this patch tested?
Adds a unit test.
Author: Ala Luszczak <ala@databricks.com>
Closes #16887 from ala/cancel.
Diffstat (limited to 'core/src/main')
4 files changed, 56 insertions, 21 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index eb13686f26..cbab7b8844 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -2207,10 +2207,32 @@ class SparkContext(config: SparkConf) extends Logging {
    * Cancel a given job if it's scheduled or running.
    *
    * @param jobId the job ID to cancel
+   * @param reason optional reason for cancellation
    * @note Throws `InterruptedException` if the cancel message cannot be sent
    */
-  def cancelJob(jobId: Int) {
-    dagScheduler.cancelJob(jobId)
+  def cancelJob(jobId: Int, reason: String): Unit = {
+    dagScheduler.cancelJob(jobId, Option(reason))
+  }
+
+  /**
+   * Cancel a given job if it's scheduled or running.
+   *
+   * @param jobId the job ID to cancel
+   * @note Throws `InterruptedException` if the cancel message cannot be sent
+   */
+  def cancelJob(jobId: Int): Unit = {
+    dagScheduler.cancelJob(jobId, None)
+  }
+
+  /**
+   * Cancel a given stage and all jobs associated with it.
+   *
+   * @param stageId the stage ID to cancel
+   * @param reason reason for cancellation
+   * @note Throws `InterruptedException` if the cancel message cannot be sent
+   */
+  def cancelStage(stageId: Int, reason: String): Unit = {
+    dagScheduler.cancelStage(stageId, Option(reason))
   }
 
   /**
@@ -2219,8 +2241,8 @@ class SparkContext(config: SparkConf) extends Logging {
    * @param stageId the stage ID to cancel
    * @note Throws `InterruptedException` if the cancel message cannot be sent
    */
-  def cancelStage(stageId: Int) {
-    dagScheduler.cancelStage(stageId)
+  def cancelStage(stageId: Int): Unit = {
+    dagScheduler.cancelStage(stageId, None)
   }
 
   /**
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
index 6177bafc11..b9d7e1328d 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
@@ -696,9 +696,9 @@ class DAGScheduler(
   /**
    * Cancel a job that is running or waiting in the queue.
    */
-  def cancelJob(jobId: Int): Unit = {
+  def cancelJob(jobId: Int, reason: Option[String]): Unit = {
     logInfo("Asked to cancel job " + jobId)
-    eventProcessLoop.post(JobCancelled(jobId))
+    eventProcessLoop.post(JobCancelled(jobId, reason))
   }
 
   /**
@@ -719,7 +719,7 @@ class DAGScheduler(
   private[scheduler] def doCancelAllJobs() {
     // Cancel all running jobs.
     runningStages.map(_.firstJobId).foreach(handleJobCancellation(_,
-      reason = "as part of cancellation of all jobs"))
+      Option("as part of cancellation of all jobs")))
     activeJobs.clear() // These should already be empty by this point,
     jobIdToActiveJob.clear() // but just in case we lost track of some jobs...
   }
@@ -727,8 +727,8 @@ class DAGScheduler(
   /**
    * Cancel all jobs associated with a running or scheduled stage.
    */
-  def cancelStage(stageId: Int) {
-    eventProcessLoop.post(StageCancelled(stageId))
+  def cancelStage(stageId: Int, reason: Option[String]) {
+    eventProcessLoop.post(StageCancelled(stageId, reason))
   }
 
   /**
@@ -785,7 +785,8 @@ class DAGScheduler(
       }
     }
     val jobIds = activeInGroup.map(_.jobId)
-    jobIds.foreach(handleJobCancellation(_, "part of cancelled job group %s".format(groupId)))
+    jobIds.foreach(handleJobCancellation(_,
+      Option("part of cancelled job group %s".format(groupId))))
   }
 
   private[scheduler] def handleBeginEvent(task: Task[_], taskInfo: TaskInfo) {
@@ -1377,24 +1378,30 @@ class DAGScheduler(
     }
   }
 
-  private[scheduler] def handleStageCancellation(stageId: Int) {
+  private[scheduler] def handleStageCancellation(stageId: Int, reason: Option[String]) {
     stageIdToStage.get(stageId) match {
       case Some(stage) =>
         val jobsThatUseStage: Array[Int] = stage.jobIds.toArray
        jobsThatUseStage.foreach { jobId =>
-          handleJobCancellation(jobId, s"because Stage $stageId was cancelled")
+          val reasonStr = reason match {
+            case Some(originalReason) =>
+              s"because $originalReason"
+            case None =>
+              s"because Stage $stageId was cancelled"
+          }
+          handleJobCancellation(jobId, Option(reasonStr))
         }
       case None =>
         logInfo("No active jobs to kill for Stage " + stageId)
     }
   }
 
-  private[scheduler] def handleJobCancellation(jobId: Int, reason: String = "") {
+  private[scheduler] def handleJobCancellation(jobId: Int, reason: Option[String]) {
     if (!jobIdToStageIds.contains(jobId)) {
       logDebug("Trying to cancel unregistered job " + jobId)
     } else {
       failJobAndIndependentStages(
-        jobIdToActiveJob(jobId), "Job %d cancelled %s".format(jobId, reason))
+        jobIdToActiveJob(jobId), "Job %d cancelled %s".format(jobId, reason.getOrElse("")))
     }
   }
 
@@ -1636,11 +1643,11 @@ private[scheduler] class DAGSchedulerEventProcessLoop(dagScheduler: DAGScheduler
     case MapStageSubmitted(jobId, dependency, callSite, listener, properties) =>
       dagScheduler.handleMapStageSubmitted(jobId, dependency, callSite, listener, properties)
 
-    case StageCancelled(stageId) =>
-      dagScheduler.handleStageCancellation(stageId)
+    case StageCancelled(stageId, reason) =>
+      dagScheduler.handleStageCancellation(stageId, reason)
 
-    case JobCancelled(jobId) =>
-      dagScheduler.handleJobCancellation(jobId)
+    case JobCancelled(jobId, reason) =>
+      dagScheduler.handleJobCancellation(jobId, reason)
 
     case JobGroupCancelled(groupId) =>
       dagScheduler.handleJobGroupCancelled(groupId)
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
index 03781a2a2b..cda0585f15 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
@@ -53,9 +53,15 @@ private[scheduler] case class MapStageSubmitted(
     properties: Properties = null)
   extends DAGSchedulerEvent
 
-private[scheduler] case class StageCancelled(stageId: Int) extends DAGSchedulerEvent
+private[scheduler] case class StageCancelled(
+    stageId: Int,
+    reason: Option[String])
+  extends DAGSchedulerEvent
 
-private[scheduler] case class JobCancelled(jobId: Int) extends DAGSchedulerEvent
+private[scheduler] case class JobCancelled(
+    jobId: Int,
+    reason: Option[String])
+  extends DAGSchedulerEvent
 
 private[scheduler] case class JobGroupCancelled(groupId: String) extends DAGSchedulerEvent
 
diff --git a/core/src/main/scala/org/apache/spark/scheduler/JobWaiter.scala b/core/src/main/scala/org/apache/spark/scheduler/JobWaiter.scala
index 9012289f04..65d7184231 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/JobWaiter.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobWaiter.scala
@@ -50,7 +50,7 @@ private[spark] class JobWaiter[T](
    * will fail this job with a SparkException.
    */
   def cancel() {
-    dagScheduler.cancelJob(jobId)
+    dagScheduler.cancelJob(jobId, None)
   }
 
   override def taskSucceeded(index: Int, result: Any): Unit = {
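Not part of the commit, but as a reference for how the pieces above fit together: a caller-supplied reason flows from `StageCancelled`/`JobCancelled` through `handleStageCancellation` into `handleJobCancellation`, and ends up in the failure message that `JobWaiter` surfaces as a `SparkException`. A small sketch of that message construction, with invented IDs and reason text:

```scala
object CancellationMessageSketch extends App {
  // Invented example values.
  val stageId = 3
  val jobId = 7
  val reason: Option[String] = Some("stage exceeded its time budget")

  // handleStageCancellation: use the caller-supplied reason if present,
  // otherwise fall back to the previous default wording.
  val reasonStr = reason match {
    case Some(originalReason) => s"because $originalReason"
    case None => s"because Stage $stageId was cancelled"
  }

  // handleJobCancellation: the text attached to the failed job, mirroring
  // the Option(...).getOrElse("") handling in the diff.
  println("Job %d cancelled %s".format(jobId, Option(reasonStr).getOrElse("")))
  // prints: Job 7 cancelled because stage exceeded its time budget
}
```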