path: root/bin/spark-class
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# NOTE: Any changes to this file must be reflected in SparkSubmitDriverBootstrapper.scala!

cygwin=false
case "`uname`" in
    CYGWIN*) cygwin=true;;
esac

SCALA_VERSION=2.10

# Figure out where Spark is installed
FWDIR="$(cd "`dirname "$0"`"/..; pwd)"

# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"

. "$FWDIR"/bin/load-spark-env.sh

if [ -z "$1" ]; then
  echo "Usage: spark-class <class> [<args>]" 1>&2
  exit 1
fi
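
# Example invocation (illustrative only; the flags are an assumption, not part of
# this script, while the class name appears in the case statement below):
#   ./bin/spark-class org.apache.spark.deploy.master.Master --host localhost --port 7077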

if [ -n "$SPARK_MEM" ]; then
  echo -e "Warning: SPARK_MEM is deprecated, please use a more specific config option" 1>&2
  echo -e "(e.g., spark.executor.memory or spark.driver.memory)." 1>&2
fi

# Use SPARK_MEM or 512m as the default memory, to be overridden by specific options
DEFAULT_MEM=${SPARK_MEM:-512m}

SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.akka.logLifecycleEvents=true"

# Add java opts and memory settings for master, worker, history server, executors, and repl.
case "$1" in
  # Master, Worker, and HistoryServer use SPARK_DAEMON_JAVA_OPTS (and specific opts) + SPARK_DAEMON_MEMORY.
  'org.apache.spark.deploy.master.Master')
    OUR_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS $SPARK_MASTER_OPTS"
    OUR_JAVA_MEM=${SPARK_DAEMON_MEMORY:-$DEFAULT_MEM}
    ;;
  'org.apache.spark.deploy.worker.Worker')
    OUR_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS $SPARK_WORKER_OPTS"
    OUR_JAVA_MEM=${SPARK_DAEMON_MEMORY:-$DEFAULT_MEM}
    ;;
  'org.apache.spark.deploy.history.HistoryServer')
    OUR_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS $SPARK_HISTORY_OPTS"
    OUR_JAVA_MEM=${SPARK_DAEMON_MEMORY:-$DEFAULT_MEM}
    ;;

  # Executors use SPARK_JAVA_OPTS + SPARK_EXECUTOR_MEMORY.
  'org.apache.spark.executor.CoarseGrainedExecutorBackend')
    OUR_JAVA_OPTS="$SPARK_JAVA_OPTS $SPARK_EXECUTOR_OPTS"
    OUR_JAVA_MEM=${SPARK_EXECUTOR_MEMORY:-$DEFAULT_MEM}
    ;;
  'org.apache.spark.executor.MesosExecutorBackend')
    OUR_JAVA_OPTS="$SPARK_JAVA_OPTS $SPARK_EXECUTOR_OPTS"
    OUR_JAVA_MEM=${SPARK_EXECUTOR_MEMORY:-$DEFAULT_MEM}
    ;;

  # Spark submit uses SPARK_JAVA_OPTS + SPARK_SUBMIT_OPTS +
  # SPARK_DRIVER_MEMORY + SPARK_SUBMIT_DRIVER_MEMORY.
  'org.apache.spark.deploy.SparkSubmit')
    OUR_JAVA_OPTS="$SPARK_JAVA_OPTS $SPARK_SUBMIT_OPTS"
    OUR_JAVA_MEM=${SPARK_DRIVER_MEMORY:-$DEFAULT_MEM}
    if [ -n "$SPARK_SUBMIT_LIBRARY_PATH" ]; then
      OUR_JAVA_OPTS="$OUR_JAVA_OPTS -Djava.library.path=$SPARK_SUBMIT_LIBRARY_PATH"
    fi
    if [ -n "$SPARK_SUBMIT_DRIVER_MEMORY" ]; then
      OUR_JAVA_MEM="$SPARK_SUBMIT_DRIVER_MEMORY"
    fi
    ;;

  *)
    OUR_JAVA_OPTS="$SPARK_JAVA_OPTS"
    OUR_JAVA_MEM=${SPARK_DRIVER_MEMORY:-$DEFAULT_MEM}
    ;;
esac
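
# Illustrative examples of how the values above resolve (the environment settings are
# assumptions, not taken from this script):
#   SPARK_DAEMON_MEMORY=1g  + class org.apache.spark.deploy.worker.Worker
#       => OUR_JAVA_MEM=1g
#   SPARK_EXECUTOR_MEMORY unset + class org.apache.spark.executor.CoarseGrainedExecutorBackend
#       => OUR_JAVA_MEM=$DEFAULT_MEM (512m unless SPARK_MEM overrides it)
#   any other class falls through to the default branch and uses SPARK_DRIVER_MEMORY or $DEFAULT_MEM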

# Find the java binary
if [ -n "${JAVA_HOME}" ]; then
  RUNNER="${JAVA_HOME}/bin/java"
else
  if [ `command -v java` ]; then
    RUNNER="java"
  else
    echo "JAVA_HOME is not set" >&2
    exit 1
  fi
fi
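# Extract the major/minor digits from `java -version`, e.g. 'java version "1.7.0_79"'
# becomes 17 and 'java version "1.8.0_45"' becomes 18; the value is compared below to
# decide whether -XX:MaxPermSize is still needed (PermGen was removed in Java 8).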
JAVA_VERSION=$("$RUNNER" -version 2>&1 | grep 'version' | sed 's/.* version "\(.*\)\.\(.*\)\..*"/\1\2/; 1q')

# Set JAVA_OPTS to be able to load native libraries and to set heap size
if [ "$JAVA_VERSION" -ge 18 ]; then
  JAVA_OPTS="$OUR_JAVA_OPTS"
else
  JAVA_OPTS="-XX:MaxPermSize=128m $OUR_JAVA_OPTS"
fi
JAVA_OPTS="$JAVA_OPTS -Xms$OUR_JAVA_MEM -Xmx$OUR_JAVA_MEM"
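# e.g. (illustrative): on Java 7 with OUR_JAVA_MEM=512m and no other opts set, the
# resulting JAVA_OPTS is roughly "-XX:MaxPermSize=128m -Xms512m -Xmx512m".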

# Load extra JAVA_OPTS from conf/java-opts, if it exists
if [ -e "$FWDIR/conf/java-opts" ] ; then
  JAVA_OPTS="$JAVA_OPTS `cat "$FWDIR"/conf/java-opts`"
fi
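# conf/java-opts simply holds extra JVM flags that are appended as-is, e.g.
# (hypothetical contents, not shipped with Spark):
#   -XX:+UseG1GC -Dspark.akka.frameSize=100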

# Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in CommandUtils.scala!

TOOLS_DIR="$FWDIR"/tools
SPARK_TOOLS_JAR=""
if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[0-9Tg].jar ]; then
  # Use the JAR from the SBT build
  export SPARK_TOOLS_JAR="`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[0-9Tg].jar`"
fi
if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
  # Use the JAR from the Maven build
  # TODO: this also needs to become an assembly!
  export SPARK_TOOLS_JAR="`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`"
fi

# Compute classpath using external script
classpath_output=$("$FWDIR"/bin/compute-classpath.sh)
if [[ "$?" != "0" ]]; then
  echo "$classpath_output"
  exit 1
else
  CLASSPATH="$classpath_output"
fi

if [[ "$1" =~ org.apache.spark.tools.* ]]; then
  if test -z "$SPARK_TOOLS_JAR"; then
    echo "Failed to find Spark Tools Jar in $FWDIR/tools/target/scala-$SCALA_VERSION/" 1>&2
    echo "You need to build Spark before running $1." 1>&2
    exit 1
  fi
  CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
fi

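# On Cygwin, convert the colon-separated POSIX classpath into a Windows-style path
# (cygpath -wp) so the Windows JVM can parse it; the tools jar path is converted the
# same way when the API completeness checker needs it.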
if $cygwin; then
  CLASSPATH="`cygpath -wp "$CLASSPATH"`"
  if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
    export SPARK_TOOLS_JAR="`cygpath -w "$SPARK_TOOLS_JAR"`"
  fi
fi
export CLASSPATH

# In Spark submit client mode, the driver is launched in the same JVM as Spark submit itself.
# Here we must parse the properties file for relevant "spark.driver.*" configs before launching
# the driver JVM itself. Instead of handling this complexity in Bash, we launch a separate JVM
# to prepare the launch environment of this driver JVM.

if [ -n "$SPARK_SUBMIT_BOOTSTRAP_DRIVER" ]; then
  # This is used only if the properties file actually contains these special configs
  # Export the environment variables needed by SparkSubmitDriverBootstrapper
  export RUNNER
  export CLASSPATH
  export JAVA_OPTS
  export OUR_JAVA_MEM
  export SPARK_CLASS=1
  shift # Ignore main class (org.apache.spark.deploy.SparkSubmit) and use our own
  exec "$RUNNER" org.apache.spark.deploy.SparkSubmitDriverBootstrapper "$@"
else
  # Note: The format of this command is closely echoed in SparkSubmitDriverBootstrapper.scala
  if [ -n "$SPARK_PRINT_LAUNCH_COMMAND" ]; then
    echo -n "Spark Command: " 1>&2
    echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@" 1>&2
    echo -e "========================================\n" 1>&2
  fi
  exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
fi
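
# Debugging tip (illustrative; the class name is an assumption, not part of this
# script): set SPARK_PRINT_LAUNCH_COMMAND=1 to have the full java command echoed to
# stderr before it is exec'd, e.g.
#   SPARK_PRINT_LAUNCH_COMMAND=1 ./bin/spark-class org.apache.spark.repl.Main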