author    Patrick Wendell <pwendell@gmail.com>  2013-08-09 13:00:33 -0700
committer Patrick Wendell <pwendell@gmail.com>  2013-08-09 13:00:33 -0700
commit    cc6b92e80ec42cc21719331e7cb8ef06aed33074 (patch)
tree      3e168cd5c2d111bc89b09f8b97a09e75f0d72ca9
parent    f94fc75c3f3b8b1337fe3f849c2cba119eaa9bc7 (diff)
parent    3970b580c206d58189ade3930aca1e478bff8c84 (diff)
Merge pull request #775 from pwendell/print-launch-command
Log the launch command for Spark daemons
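
This change makes the standalone daemons and executors record exactly how their JVMs were launched: spark-daemon.sh now writes a "Spark Daemon:" header and appends (rather than truncates) daemon output to the log, ExecutorRunner prepends a "Spark Executor Command:" header to each executor's stdout file, and the run script echoes the full launch command whenever SPARK_PRINT_LAUNCH_COMMAND is set to 1 (see the usage sketch after the run diff below).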
-rwxr-xr-x  bin/spark-daemon.sh                                            |  5
-rw-r--r--  core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala  | 16
-rwxr-xr-x  run                                                           | 10
3 files changed, 26 insertions(+), 5 deletions(-)
diff --git a/bin/spark-daemon.sh b/bin/spark-daemon.sh
index a5b88ca785..96c71e66ca 100755
--- a/bin/spark-daemon.sh
+++ b/bin/spark-daemon.sh
@@ -75,6 +75,8 @@ if [ "$SPARK_IDENT_STRING" = "" ]; then
   export SPARK_IDENT_STRING="$USER"
 fi
 
+export SPARK_PRINT_LAUNCH_COMMAND="1"
+
 # get log directory
 if [ "$SPARK_LOG_DIR" = "" ]; then
   export SPARK_LOG_DIR="$SPARK_HOME/logs"
@@ -124,8 +126,9 @@ case $startStop in
 
     spark_rotate_log $log
     echo starting $command, logging to $log
+    echo "Spark Daemon: $command" > $log
     cd "$SPARK_PREFIX"
-    nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/run $command "$@" > "$log" 2>&1 < /dev/null &
+    nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/run $command "$@" >> "$log" 2>&1 < /dev/null &
    echo $! > $pid
     sleep 1; head "$log"
     ;;
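
Note how the two edits above work as a pair: the new echo writes the "Spark Daemon:" header with a truncating > redirect, so the nohup redirect that follows has to change from > to >> or the daemon's output would overwrite the header. Because the script also exports SPARK_PRINT_LAUNCH_COMMAND=1, the run script's own "Spark Command:" banner lands in the same log, so a fresh daemon log starts out roughly like this (illustrative; the actual class and classpath depend on which daemon is started):

    Spark Daemon: spark.deploy.master.Master
    Spark Command: java -cp "..." spark.deploy.master.Master
    ========================================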
diff --git a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
index 8f6d25c33f..4537c8305c 100644
--- a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
@@ -22,6 +22,9 @@ import java.lang.System.getenv
 
 import akka.actor.ActorRef
 
+import com.google.common.base.Charsets
+import com.google.common.io.Files
+
 import spark.{Utils, Logging}
 import spark.deploy.{ExecutorState, ApplicationDescription}
 import spark.deploy.DeployMessages.ExecutorStateChanged
@@ -126,7 +129,7 @@ private[spark] class ExecutorRunner(
 
   /** Spawn a thread that will redirect a given stream to a file */
   def redirectStream(in: InputStream, file: File) {
-    val out = new FileOutputStream(file)
+    val out = new FileOutputStream(file, true)
     new Thread("redirect output to " + file) {
       override def run() {
         try {
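
The second constructor argument to FileOutputStream is the append flag. Opening the target file in append mode is what allows the header written in the next hunk to survive once the redirect thread starts copying the child process's output into the same file.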
@@ -162,9 +165,16 @@
     env.put("SPARK_LAUNCH_WITH_SCALA", "0")
     process = builder.start()
 
+    val header = "Spark Executor Command: %s\n%s\n\n".format(
+      command.mkString("\"", "\" \"", "\""), "=" * 40)
+
     // Redirect its stdout and stderr to files
-    redirectStream(process.getInputStream, new File(executorDir, "stdout"))
-    redirectStream(process.getErrorStream, new File(executorDir, "stderr"))
+    val stdout = new File(executorDir, "stdout")
+    Files.write(header, stdout, Charsets.UTF_8)
+    redirectStream(process.getInputStream, stdout)
+
+    val stderr = new File(executorDir, "stderr")
+    redirectStream(process.getErrorStream, stderr)
 
     // Wait for it to exit; this is actually a bad thing if it happens, because we expect to run
     // long-lived processes only. However, in the future, we might restart the executor a few
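
A minimal self-contained sketch of the write-header-then-append pattern used above, with a hypothetical file path and header text; Guava's Files.write and the two-argument FileOutputStream are the same calls the patch uses:

    import java.io.{File, FileOutputStream}

    import com.google.common.base.Charsets
    import com.google.common.io.Files

    object HeaderThenAppend {
      def main(args: Array[String]) {
        val stdout = new File("/tmp/stdout-example")  // hypothetical path
        val header = "Example Command: %s\n%s\n\n".format("\"java\"", "=" * 40)
        // Files.write truncates the file and writes the header first...
        Files.write(header, stdout, Charsets.UTF_8)
        // ...then append mode (second argument = true) preserves that header
        // while subsequent process output is copied into the same file.
        val out = new FileOutputStream(stdout, true)
        out.write("process output goes here\n".getBytes(Charsets.UTF_8))
        out.close()
      }
    }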
diff --git a/run b/run
index 0a440627a1..3868332c90 100755
--- a/run
+++ b/run
@@ -164,4 +164,12 @@ else
   # The JVM doesn't read JAVA_OPTS by default so we need to pass it in
   EXTRA_ARGS="$JAVA_OPTS"
 fi
-exec "$RUNNER" -cp "$CLASSPATH" $EXTRA_ARGS "$@"
+
+command="$RUNNER -cp \"$CLASSPATH\" $EXTRA_ARGS $@"
+if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
+  echo "Spark Command: $command"
+  echo "========================================"
+  echo
+fi
+
+exec $command
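
With the guard above, the banner only appears when the environment variable is set, e.g. (illustrative invocation; the real classpath is much longer):

    $ SPARK_PRINT_LAUNCH_COMMAND=1 ./run spark.deploy.master.Master
    Spark Command: java -cp "..." spark.deploy.master.Master
    ========================================

One consequence of building the command as a flat string is that exec $command expands by word splitting, so the escaped quotes around $CLASSPATH reach the JVM as literal characters rather than acting as shell quoting; the trade-off is a single string that can be both logged and executed.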