aboutsummaryrefslogtreecommitdiff
path: root/core
diff options
context:
space:
mode:
authorAndrew Or <andrewor14@gmail.com>2014-08-26 22:52:16 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-08-26 22:52:16 -0700
commit7557c4cfef2398d124b00472e2696f0559a36ef7 (patch)
treec780f689efe09f3bc60077a62c9a43688c854279 /core
parentbf719056b71d55e1194554661dfa194ed03d364d (diff)
downloadspark-7557c4cfef2398d124b00472e2696f0559a36ef7.tar.gz
spark-7557c4cfef2398d124b00472e2696f0559a36ef7.tar.bz2
spark-7557c4cfef2398d124b00472e2696f0559a36ef7.zip
[SPARK-3167] Handle special driver configs in Windows
This is an effort to bring the Windows scripts up to speed after recent splashing changes in #1845. Author: Andrew Or <andrewor14@gmail.com> Closes #2129 from andrewor14/windows-config and squashes the following commits: 881a8f0 [Andrew Or] Add reference to Windows taskkill 92e6047 [Andrew Or] Update a few comments (minor) 22b1acd [Andrew Or] Fix style again (minor) afcffea [Andrew Or] Fix style (minor) 72004c2 [Andrew Or] Actually respect --driver-java-options 803218b [Andrew Or] Actually respect SPARK_*_CLASSPATH eeb34a0 [Andrew Or] Update outdated comment (minor) 35caecc [Andrew Or] In Windows, actually kill Java processes on exit f97daa2 [Andrew Or] Fix Windows spark shell stdin issue 83ebe60 [Andrew Or] Parse special driver configs in Windows (broken)
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/SparkSubmitDriverBootstrapper.scala19
1 file changed, 13 insertions, 6 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitDriverBootstrapper.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitDriverBootstrapper.scala
index af607e6a4a..7ca96ed57c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitDriverBootstrapper.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitDriverBootstrapper.scala
@@ -133,17 +133,24 @@ private[spark] object SparkSubmitDriverBootstrapper {
val process = builder.start()
// Redirect stdin, stdout, and stderr to/from the child JVM
- val stdinThread = new RedirectThread(System.in, process.getOutputStream, "redirect stdin")
val stdoutThread = new RedirectThread(process.getInputStream, System.out, "redirect stdout")
val stderrThread = new RedirectThread(process.getErrorStream, System.err, "redirect stderr")
- stdinThread.start()
stdoutThread.start()
stderrThread.start()
- // Terminate on broken pipe, which signals that the parent process has exited. This is
- // important for the PySpark shell, where Spark submit itself is a python subprocess.
- stdinThread.join()
- process.destroy()
+ // In Windows, the subprocess reads directly from our stdin, so we should avoid spawning
+ // a thread that contends with the subprocess in reading from System.in.
+ if (Utils.isWindows) {
+ // For the PySpark shell, the termination of this process is handled in java_gateway.py
+ process.waitFor()
+ } else {
+ // Terminate on broken pipe, which signals that the parent process has exited. This is
+ // important for the PySpark shell, where Spark submit itself is a python subprocess.
+ val stdinThread = new RedirectThread(System.in, process.getOutputStream, "redirect stdin")
+ stdinThread.start()
+ stdinThread.join()
+ process.destroy()
+ }
}
}