about summary refs log tree commit diff
path: root/repl
diff options
context:
space:
mode:
authorJakob Odersky <jakob@odersky.com>2016-04-21 22:04:08 -0700
committerDavies Liu <davies.liu@gmail.com>2016-04-21 22:04:08 -0700
commit80127935df06a829b734cafc2447aa1f3df40288 (patch)
tree03f1bfd290b6ba20c894e254603647e64b264b53 /repl
parent3405cc775843a3a80d009d4f9079ba9daa2220e7 (diff)
downloadspark-80127935df06a829b734cafc2447aa1f3df40288.tar.gz
spark-80127935df06a829b734cafc2447aa1f3df40288.tar.bz2
spark-80127935df06a829b734cafc2447aa1f3df40288.zip
[SPARK-10001] [CORE] Interrupt tasks in repl with Ctrl+C
## What changes were proposed in this pull request?

Improve signal handling to allow interrupting running tasks from the REPL (with Ctrl+C). If no tasks are running or Ctrl+C is pressed twice, the signal is forwarded to the default handler resulting in the usual termination of the application.

This PR is a rewrite of -- and therefore closes #8216 -- as per piaozhexiu's request.

## How was this patch tested?

Signal handling is not easily testable; therefore no unit tests were added. Nevertheless, the new functionality is implemented in a best-effort approach, soft-failing in case signals aren't available on a specific OS.

Author: Jakob Odersky <jakob@odersky.com>

Closes #12557 from jodersky/SPARK-10001-sigint.
Diffstat (limited to 'repl')
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala |  1 +
-rw-r--r--  repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala       |  1 +
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/Signaling.scala             | 42 ++++++++
3 files changed, 44 insertions, 0 deletions
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index c5dc6ba221..8e7fc46e32 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -1022,6 +1022,7 @@ class SparkILoop(
}
sparkContext = new SparkContext(conf)
logInfo("Created spark context..")
+ Signaling.cancelOnInterrupt(sparkContext)
sparkContext
}
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index b822ff496c..bd853f1522 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -88,6 +88,7 @@ object Main extends Logging {
}
sparkContext = new SparkContext(conf)
logInfo("Created spark context..")
+ Signaling.cancelOnInterrupt(sparkContext)
sparkContext
}
diff --git a/repl/src/main/scala/org/apache/spark/repl/Signaling.scala b/repl/src/main/scala/org/apache/spark/repl/Signaling.scala
new file mode 100644
index 0000000000..c305ed545c
--- /dev/null
+++ b/repl/src/main/scala/org/apache/spark/repl/Signaling.scala
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.repl
+
+import org.apache.spark.SparkContext
+import org.apache.spark.internal.Logging
+import org.apache.spark.util.{Signaling => USignaling}
+
+private[repl] object Signaling extends Logging {
+
+ /**
+ * Register a SIGINT handler, that terminates all active spark jobs or terminates
+ * when no jobs are currently running.
+ * This makes it possible to interrupt a running shell job by pressing Ctrl+C.
+ */
+ def cancelOnInterrupt(ctx: SparkContext): Unit = USignaling.register("INT") {
+ if (!ctx.statusTracker.getActiveJobIds().isEmpty) {
+ logWarning("Cancelling all active jobs, this can take a while. " +
+ "Press Ctrl+C again to exit now.")
+ ctx.cancelAllJobs()
+ true
+ } else {
+ false
+ }
+ }
+
+}