about summary refs log tree commit diff
diff options
context:
space:
mode:
author    Prashant Sharma <prashant.s@imaginea.com>  2014-07-03 15:06:58 -0700
committer Patrick Wendell <pwendell@gmail.com>       2014-07-03 15:06:58 -0700
commit   731f683b1bd8abbb83030b6bae14876658bbf098 (patch)
tree     729c971a4dd31818da07b8cec4d69d7d7fa9104d
parent   a9b52e5623f7fc77fca96b095f9eeaef76e35d54 (diff)
download spark-731f683b1bd8abbb83030b6bae14876658bbf098.tar.gz
spark-731f683b1bd8abbb83030b6bae14876658bbf098.tar.bz2
spark-731f683b1bd8abbb83030b6bae14876658bbf098.zip
[SPARK-2109] Setting SPARK_MEM for bin/pyspark does not work.
Trivial fix.

Author: Prashant Sharma <prashant.s@imaginea.com>

Closes #1050 from ScrapCodes/SPARK-2109/pyspark-script-bug and squashes the following commits:

77072b9 [Prashant Sharma] Changed echos to redirect to STDERR.
13f48a0 [Prashant Sharma] [SPARK-2109] Setting SPARK_MEM for bin/pyspark does not work.
-rwxr-xr-x  bin/compute-classpath.sh   8
-rwxr-xr-x  bin/pyspark                6
-rwxr-xr-x  bin/run-example           10
-rwxr-xr-x  bin/spark-class           13
4 files changed, 18 insertions, 19 deletions
diff --git a/bin/compute-classpath.sh b/bin/compute-classpath.sh
index 2cf4e381c1..e81e8c060c 100755
--- a/bin/compute-classpath.sh
+++ b/bin/compute-classpath.sh
@@ -81,10 +81,10 @@ ASSEMBLY_JAR=$(ls "$assembly_folder"/spark-assembly*hadoop*.jar 2>/dev/null)
# Verify that versions of java used to build the jars and run Spark are compatible
jar_error_check=$("$JAR_CMD" -tf "$ASSEMBLY_JAR" nonexistent/class/path 2>&1)
if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
- echo "Loading Spark jar with '$JAR_CMD' failed. "
- echo "This is likely because Spark was compiled with Java 7 and run "
- echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark "
- echo "or build Spark with Java 6."
+ echo "Loading Spark jar with '$JAR_CMD' failed. " 1>&2
+ echo "This is likely because Spark was compiled with Java 7 and run " 1>&2
+ echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark " 1>&2
+ echo "or build Spark with Java 6." 1>&2
exit 1
fi
diff --git a/bin/pyspark b/bin/pyspark
index 0b5ed40e21..69b056fe28 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -26,7 +26,7 @@ export SPARK_HOME="$FWDIR"
SCALA_VERSION=2.10
if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
- echo "Usage: ./bin/pyspark [options]"
+ echo "Usage: ./bin/pyspark [options]" 1>&2
$FWDIR/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
exit 0
fi
@@ -36,8 +36,8 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
# Exit if the user hasn't compiled Spark
ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar >& /dev/null
if [[ $? != 0 ]]; then
- echo "Failed to find Spark assembly in $FWDIR/assembly/target" >&2
- echo "You need to build Spark before running this program" >&2
+ echo "Failed to find Spark assembly in $FWDIR/assembly/target" 1>&2
+ echo "You need to build Spark before running this program" 1>&2
exit 1
fi
fi
diff --git a/bin/run-example b/bin/run-example
index e7a5fe3914..942706d733 100755
--- a/bin/run-example
+++ b/bin/run-example
@@ -27,9 +27,9 @@ if [ -n "$1" ]; then
EXAMPLE_CLASS="$1"
shift
else
- echo "Usage: ./bin/run-example <example-class> [example-args]"
- echo " - set MASTER=XX to use a specific master"
- echo " - can use abbreviated example class name (e.g. SparkPi, mllib.LinearRegression)"
+ echo "Usage: ./bin/run-example <example-class> [example-args]" 1>&2
+ echo " - set MASTER=XX to use a specific master" 1>&2
+ echo " - can use abbreviated example class name (e.g. SparkPi, mllib.LinearRegression)" 1>&2
exit 1
fi
@@ -40,8 +40,8 @@ elif [ -e "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/spark-examples-*hadoop*.ja
fi
if [[ -z $SPARK_EXAMPLES_JAR ]]; then
- echo "Failed to find Spark examples assembly in $FWDIR/lib or $FWDIR/examples/target" >&2
- echo "You need to build Spark before running this program" >&2
+ echo "Failed to find Spark examples assembly in $FWDIR/lib or $FWDIR/examples/target" 1>&2
+ echo "You need to build Spark before running this program" 1>&2
exit 1
fi
diff --git a/bin/spark-class b/bin/spark-class
index 60d9657c0f..04fa52c675 100755
--- a/bin/spark-class
+++ b/bin/spark-class
@@ -33,13 +33,13 @@ export SPARK_HOME="$FWDIR"
. $FWDIR/bin/load-spark-env.sh
if [ -z "$1" ]; then
- echo "Usage: spark-class <class> [<args>]" >&2
+ echo "Usage: spark-class <class> [<args>]" 1>&2
exit 1
fi
if [ -n "$SPARK_MEM" ]; then
- echo "Warning: SPARK_MEM is deprecated, please use a more specific config option"
- echo "(e.g., spark.executor.memory or SPARK_DRIVER_MEMORY)."
+ echo -e "Warning: SPARK_MEM is deprecated, please use a more specific config option" 1>&2
+ echo -e "(e.g., spark.executor.memory or SPARK_DRIVER_MEMORY)." 1>&2
fi
# Use SPARK_MEM or 512m as the default memory, to be overridden by specific options
@@ -147,10 +147,9 @@ fi
export CLASSPATH
if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
- echo -n "Spark Command: "
- echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
- echo "========================================"
- echo
+ echo -n "Spark Command: " 1>&2
+ echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@" 1>&2
+ echo -e "========================================\n" 1>&2
fi
exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"