From a90e0eff5982ba46b3658e91dec89bd08ce450e5 Mon Sep 17 00:00:00 2001
From: Prashant Sharma
Date: Sun, 15 Sep 2013 12:47:20 +0530
Subject: version changed 2.9.3 -> 2.10 in shell script.

---
 repl-bin/src/deb/bin/run | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/repl-bin/src/deb/bin/run b/repl-bin/src/deb/bin/run
index 8b5d8300f2..47bb654baf 100755
--- a/repl-bin/src/deb/bin/run
+++ b/repl-bin/src/deb/bin/run
@@ -17,7 +17,7 @@
 # limitations under the License.
 #
 
-SCALA_VERSION=2.9.3
+SCALA_VERSION=2.10
 
 # Figure out where the Scala framework is installed
 FWDIR="$(cd `dirname $0`; pwd)"
-- 
cgit v1.2.3
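
Why this one-line bump matters: the launcher scripts resolve the Spark assembly jar from a path that embeds SCALA_VERSION, so a stale value makes the jar lookup fail even on a freshly built tree. A minimal standalone sketch of that lookup, modeled on the check in bin/spark below (the FWDIR computation and messages here are illustrative assumptions, not part of the patch):

    #!/usr/bin/env bash
    # Hypothetical check: does an assembly jar exist for the configured
    # Scala version? Mirrors the path pattern the launcher scripts use.
    SCALA_VERSION=2.10
    FWDIR="$(cd "$(dirname "$0")"; pwd)"  # assumes this sketch sits at the repo root
    if ! ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar >/dev/null 2>&1; then
      echo "No Spark assembly for Scala $SCALA_VERSION under $FWDIR/assembly/target" >&2
      echo "Build one first, e.g. with sbt/sbt assembly" >&2
      exit 1
    fi
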
From 52ccf4f859d92ed9e86d3720a983ac2c4a1c23bf Mon Sep 17 00:00:00 2001
From: Andrew xia
Date: Sat, 12 Oct 2013 14:34:14 +0800
Subject: deprecate "spark" script and SPARK_CLASSPATH environment variable

---
 bin/spark                                          | 92 ----------------------
 core/pom.xml                                       |  1 -
 .../main/scala/org/apache/spark/SparkContext.scala |  2 +-
 repl-bin/src/deb/bin/run                           |  3 +-
 repl/pom.xml                                       |  1 -
 sbin/compute-classpath.cmd                         |  2 +-
 sbin/compute-classpath.sh                          |  2 +-
 7 files changed, 4 insertions(+), 99 deletions(-)
 delete mode 100755 bin/spark

diff --git a/bin/spark b/bin/spark
deleted file mode 100755
index f5f7440d38..0000000000
--- a/bin/spark
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-SCALA_VERSION=2.9.3
-
-# Figure out where the Scala framework is installed
-FWDIR="$(cd `dirname $0`/..; pwd)"
-
-# Export this as SPARK_HOME
-export SPARK_HOME="$FWDIR"
-
-# Load environment variables from conf/spark-env.sh, if it exists
-if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
-  . $FWDIR/conf/spark-env.sh
-fi
-
-if [ -z "$1" ]; then
-  echo "Usage: spark <class> [<args>]" >&2
-  echo "Usage: export SPARK_CLASSPATH before running the command" >&2
-  exit 1
-fi
-
-
-# Find the java binary
-if [ -n "${JAVA_HOME}" ]; then
-  RUNNER="${JAVA_HOME}/bin/java"
-else
-  if [ `command -v java` ]; then
-    RUNNER="java"
-  else
-    echo "JAVA_HOME is not set" >&2
-    exit 1
-  fi
-fi
-
-# Set SPARK_MEM if it isn't already set
-SPARK_MEM=${SPARK_MEM:-512m}
-export SPARK_MEM
-
-# Set APP_MEM if it isn't already set, we use this for this process as the app driver process may need
-# as much memory as specified in SPARK_MEM
-APP_MEM=${APP_MEM:-512m}
-
-# Set JAVA_OPTS to be able to load native libraries and to set heap size
-JAVA_OPTS="$OUR_JAVA_OPTS"
-JAVA_OPTS="$JAVA_OPTS -Djava.library.path=$SPARK_LIBRARY_PATH"
-JAVA_OPTS="$JAVA_OPTS -Xms$APP_MEM -Xmx$APP_MEM"
-# Load extra JAVA_OPTS from conf/java-opts, if it exists
-if [ -e "$FWDIR/conf/java-opts" ] ; then
-  JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`"
-fi
-export JAVA_OPTS
-# Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in ExecutorRunner.scala!
-
-if [ ! -f "$FWDIR/RELEASE" ]; then
-  # Exit if the user hasn't compiled Spark
-  ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar >& /dev/null
-  if [[ $? != 0 ]]; then
-    echo "Failed to find Spark assembly in $FWDIR/assembly/target" >&2
-    echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
-    exit 1
-  fi
-fi
-
-# Compute classpath using external script
-CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`
-export CLASSPATH
-
-if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
-  echo -n "Spark Command: "
-  echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
-  echo "========================================"
-  echo
-fi
-
-exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
diff --git a/core/pom.xml b/core/pom.xml
index 9c2d6046a9..8359fefdb4 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -226,7 +226,6 @@
         <configuration>
           <environmentVariables>
             <SPARK_HOME>${basedir}/..</SPARK_HOME>
             <SPARK_TESTING>1</SPARK_TESTING>
-            <SPARK_CLASSPATH>${spark.classpath}</SPARK_CLASSPATH>
           </environmentVariables>
         </configuration>
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 912ce752fb..ce7c4feaf6 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -132,7 +132,7 @@ class SparkContext(
   // Environment variables to pass to our executors
   private[spark] val executorEnvs = HashMap[String, String]()
   // Note: SPARK_MEM is included for Mesos, but overwritten for standalone mode in ExecutorRunner
-  for (key <- Seq("SPARK_CLASSPATH", "SPARK_LIBRARY_PATH", "SPARK_JAVA_OPTS", "SPARK_TESTING")) {
+  for (key <- Seq("SPARK_LIBRARY_PATH", "SPARK_JAVA_OPTS", "SPARK_TESTING")) {
     val value = System.getenv(key)
     if (value != null) {
       executorEnvs(key) = value
diff --git a/repl-bin/src/deb/bin/run b/repl-bin/src/deb/bin/run
index 8b5d8300f2..d34f18906b 100755
--- a/repl-bin/src/deb/bin/run
+++ b/repl-bin/src/deb/bin/run
@@ -48,8 +48,7 @@ fi
 export JAVA_OPTS
 
 # Build up classpath
-CLASSPATH="$SPARK_CLASSPATH"
-CLASSPATH+=":$FWDIR/conf"
+CLASSPATH=":$FWDIR/conf"
 for jar in `find $FWDIR -name '*jar'`; do
   CLASSPATH+=":$jar"
 done
diff --git a/repl/pom.xml b/repl/pom.xml
index 2826c0743c..f71184f865 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -125,7 +125,6 @@
         <configuration>
          <environmentVariables>
            <SPARK_HOME>${basedir}/..</SPARK_HOME>
            <SPARK_TESTING>1</SPARK_TESTING>
-           <SPARK_CLASSPATH>${spark.classpath}</SPARK_CLASSPATH>
          </environmentVariables>
        </configuration>
diff --git a/sbin/compute-classpath.cmd b/sbin/compute-classpath.cmd
index cf38188c4b..e0b8a8ef5f 100644
--- a/sbin/compute-classpath.cmd
+++ b/sbin/compute-classpath.cmd
@@ -29,7 +29,7 @@ rem Load environment variables from conf\spark-env.cmd, if it exists
 if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
 
 rem Build up classpath
-set CLASSPATH=%SPARK_CLASSPATH%;%FWDIR%conf
+set CLASSPATH=%FWDIR%conf
 if exist "%FWDIR%RELEASE" (
   for %%d in ("%FWDIR%jars\spark-assembly*.jar") do (
     set ASSEMBLY_JAR=%%d
diff --git a/sbin/compute-classpath.sh b/sbin/compute-classpath.sh
index d9217ecf77..cfe5fe7bef 100755
--- a/sbin/compute-classpath.sh
+++ b/sbin/compute-classpath.sh
@@ -31,7 +31,7 @@ if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
 fi
 
 # Build up classpath
-CLASSPATH="$SPARK_CLASSPATH:$FWDIR/conf"
+CLASSPATH="$FWDIR/conf"
 if [ -f "$FWDIR/RELEASE" ]; then
   ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar`
 else
-- 
cgit v1.2.3
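
With SPARK_CLASSPATH gone, the classpath in these scripts is derived entirely from the tree: conf/ plus every jar found under the installation directory. A condensed sketch of the assembly loop the patched run script now performs (note the '*.jar' glob here tightens the patch's literal '*jar' pattern, and the patch itself writes ":$FWDIR/conf" with a leading colon, reproduced verbatim in the diff above; this cleaned-up form is an assumption):

    #!/usr/bin/env bash
    # Sketch of the post-patch classpath assembly in repl-bin/src/deb/bin/run:
    # start from conf/ and append every jar found below the installation root.
    FWDIR="$(cd "$(dirname "$0")"; pwd)"
    CLASSPATH="$FWDIR/conf"
    for jar in $(find "$FWDIR" -name '*.jar'); do
      CLASSPATH+=":$jar"
    done
    export CLASSPATH
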
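A quick way to confirm the deprecation reached every launcher is to search the checkout for leftover references; after this series, no script or SparkContext should read the removed variable. A hypothetical check, assuming GNU grep and a checkout at 52ccf4f:

    # List any remaining SPARK_CLASSPATH references across scripts and Scala sources.
    grep -rn 'SPARK_CLASSPATH' --include='*.sh' --include='*.cmd' --include='*.scala' .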