Diffstat (limited to 'sbin/spark-class')
-rwxr-xr-x  sbin/spark-class  154
1 file changed, 0 insertions(+), 154 deletions(-)
diff --git a/sbin/spark-class b/sbin/spark-class
deleted file mode 100755
index 4e440d8729..0000000000
--- a/sbin/spark-class
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-cygwin=false
-case "$(uname)" in
- CYGWIN*) cygwin=true;;
-esac
-
-SCALA_VERSION=2.10
-
-# Figure out where Spark is installed
-FWDIR="$(cd "$(dirname "$0")"/.. && pwd)"
-
-# Export this as SPARK_HOME
-export SPARK_HOME="$FWDIR"
-
-# Load environment variables from conf/spark-env.sh, if it exists
-if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
-  . "$FWDIR/conf/spark-env.sh"
-fi
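-# For illustration, a minimal conf/spark-env.sh could contain, e.g.:
-#   export SPARK_DAEMON_MEMORY=1g   # picked up by the daemon branch below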
-
-if [ -z "$1" ]; then
- echo "Usage: spark-class <class> [<args>]" >&2
- exit 1
-fi
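-# Illustrative invocations (class names taken from the case statement below):
-#   ./sbin/spark-class org.apache.spark.deploy.master.Master
-#   ./sbin/spark-class org.apache.spark.deploy.worker.Worker spark://master-host:7077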
-
-# If this is a standalone cluster daemon, reset SPARK_JAVA_OPTS and SPARK_MEM to reasonable
-# values for that; a daemon does not need much memory
-if [ "$1" = "org.apache.spark.deploy.master.Master" ] || [ "$1" = "org.apache.spark.deploy.worker.Worker" ]; then
- SPARK_MEM=${SPARK_DAEMON_MEMORY:-512m}
- SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.akka.logLifecycleEvents=true"
- # Do not overwrite SPARK_JAVA_OPTS environment variable in this script
- OUR_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS" # Empty by default
-else
- OUR_JAVA_OPTS="$SPARK_JAVA_OPTS"
-fi
-
-
-# Add java opts for master, worker, executor. The opts may be empty
-case "$1" in
- 'org.apache.spark.deploy.master.Master')
- OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_MASTER_OPTS"
- ;;
- 'org.apache.spark.deploy.worker.Worker')
- OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_WORKER_OPTS"
- ;;
- 'org.apache.spark.executor.CoarseGrainedExecutorBackend')
- OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_EXECUTOR_OPTS"
- ;;
- 'org.apache.spark.executor.MesosExecutorBackend')
- OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_EXECUTOR_OPTS"
- ;;
- 'org.apache.spark.repl.Main')
- OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_REPL_OPTS"
- ;;
-esac
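-# For example, launching the Worker as
-#   SPARK_WORKER_OPTS="-Dexample.setting=value" ./sbin/spark-class org.apache.spark.deploy.worker.Worker spark://master-host:7077
-# (hypothetical property and host) appends those options to the Worker JVM only;
-# the other classes are unaffected.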
-
-# Find the java binary
-if [ -n "${JAVA_HOME}" ]; then
-  RUNNER="${JAVA_HOME}/bin/java"
-elif command -v java > /dev/null 2>&1; then
-  RUNNER="java"
-else
-  echo "JAVA_HOME is not set and no 'java' executable was found on the PATH" >&2
-  exit 1
-fi
-
-# Set SPARK_MEM if it isn't already set since we also use it for this process
-SPARK_MEM=${SPARK_MEM:-512m}
-export SPARK_MEM
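-# e.g. SPARK_MEM=1g ./sbin/spark-class <class> makes the JVM below start with -Xms1g -Xmx1g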
-
-# Set JAVA_OPTS to be able to load native libraries and to set heap size
-JAVA_OPTS="$OUR_JAVA_OPTS"
-JAVA_OPTS="$JAVA_OPTS -Djava.library.path=$SPARK_LIBRARY_PATH"
-JAVA_OPTS="$JAVA_OPTS -Xms$SPARK_MEM -Xmx$SPARK_MEM"
-# Load extra JAVA_OPTS from conf/java-opts, if it exists
-if [ -e "$FWDIR/conf/java-opts" ] ; then
-  JAVA_OPTS="$JAVA_OPTS $(cat "$FWDIR/conf/java-opts")"
-fi
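-# An illustrative conf/java-opts would hold extra JVM flags on a single line, e.g.:
-#   -verbose:gc -XX:+PrintGCDetails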
-export JAVA_OPTS
-# Attention: any change to the way JAVA_OPTS is assembled here must be mirrored in ExecutorRunner.scala!
-
-if [ ! -f "$FWDIR/RELEASE" ]; then
- # Exit if the user hasn't compiled Spark
-  num_jars=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*\.jar" | wc -l)
-  jars_list=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*\.jar")
- if [ "$num_jars" -eq "0" ]; then
- echo "Failed to find Spark assembly in $FWDIR/assembly/target/scala-$SCALA_VERSION/" >&2
- echo "You need to build Spark with 'sbt/sbt assembly' before running this program." >&2
- exit 1
- fi
- if [ "$num_jars" -gt "1" ]; then
-    echo "Found multiple Spark assembly jars in $FWDIR/assembly/target/scala-$SCALA_VERSION:" >&2
-    echo "$jars_list" >&2
-    echo "Please remove all but one jar." >&2
- exit 1
- fi
-fi
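-# After a successful 'sbt/sbt assembly' build exactly one jar should match above,
-# named along the lines of spark-assembly-<version>-hadoop<version>.jar (illustrative pattern).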
-
-TOOLS_DIR="$FWDIR"/tools
-SPARK_TOOLS_JAR=""
-if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
- # Use the JAR from the SBT build
-  export SPARK_TOOLS_JAR=$(ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar)
-fi
-if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
- # Use the JAR from the Maven build
- # TODO: this also needs to become an assembly!
-  export SPARK_TOOLS_JAR=$(ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar)
-fi
-
-# Compute classpath using external script
-CLASSPATH=$("$FWDIR/sbin/compute-classpath.sh")
-
-if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
- CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
-fi
-
-if $cygwin; then
-  CLASSPATH=$(cygpath -wp "$CLASSPATH")
- if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
-    export SPARK_TOOLS_JAR=$(cygpath -w "$SPARK_TOOLS_JAR")
- fi
-fi
-export CLASSPATH
-
-if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
- echo -n "Spark Command: "
- echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
- echo "========================================"
- echo
-fi
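-# e.g. SPARK_PRINT_LAUNCH_COMMAND=1 ./sbin/spark-class org.apache.spark.repl.Main
-# prints the exact java command line before exec replaces this shell with it.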
-
-exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
-
-