path: root/run-pyspark

#!/usr/bin/env bash

# Figure out where Spark is installed (the directory containing this script)
FWDIR="$(cd "$(dirname "$0")"; pwd)"

# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"

# Load environment variables from conf/spark-env.sh, if it exists
if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
  . "$FWDIR/conf/spark-env.sh"
fi
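
# Illustrative sketch only (values are hypothetical): a conf/spark-env.sh
# could set defaults for the variables used below, for example:
#   export PYSPARK_PYTHON=python3
#   export SPARK_LAUNCH_WITH_SCALA=1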

# Figure out which Python executable to use
if [ -z "$PYSPARK_PYTHON" ] ; then
  PYSPARK_PYTHON="python"
fi
export PYSPARK_PYTHON
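
# Example (interpreter name and script are placeholders): the default can also
# be overridden per invocation, e.g.
#   PYSPARK_PYTHON=python3 ./run-pyspark my_app.py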

# Add the PySpark classes to the Python path:
export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"

# Launch with `scala` by default:
if [[ "$SPARK_LAUNCH_WITH_SCALA" != "0" ]] ; then
  export SPARK_LAUNCH_WITH_SCALA=1
fi
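# Note: this script only establishes the default above; the flag is not read
# again in this file, so it is presumably consumed by the scripts that launch
# the JVM side of Spark.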

exec "$PYSPARK_PYTHON" "$@"