author     Cheng Lian <lian.cs.zju@gmail.com>      2014-08-06 12:28:35 -0700
committer  Patrick Wendell <pwendell@gmail.com>    2014-08-06 12:28:35 -0700
commit     a6cd31108f0d73ce6823daafe8447677e03cfd13 (patch)
tree       b37ec4045b01db8d5f5635fe77e6b2a50d082830 /sbin/start-thriftserver.sh
parent     48789117c2dd6d38e0bd8d21cdbcb989913205a6 (diff)
[SPARK-2678][Core][SQL] A workaround for SPARK-2678
JIRA issues:

- Main: [SPARK-2678](https://issues.apache.org/jira/browse/SPARK-2678)
- Related: [SPARK-2874](https://issues.apache.org/jira/browse/SPARK-2874)

Related PR:

- #1715

This PR is both a fix for SPARK-2874 and a workaround for SPARK-2678. Fixing SPARK-2678 completely requires some API level changes that need further discussion, and we decided not to include it in the Spark 1.1 release. As SPARK-2678 currently only affects Spark SQL scripts, this workaround is enough for Spark 1.1.

Command line option handling logic in bash scripts looks somewhat dirty and duplicated, but it helps to provide a cleaner user interface as well as retain full downward compatibility for now.

Author: Cheng Lian <lian.cs.zju@gmail.com>

Closes #1801 from liancheng/spark-2874 and squashes the following commits:

8045d7a [Cheng Lian] Make sure test suites pass
8493a9e [Cheng Lian] Using eval to retain quoted arguments
aed523f [Cheng Lian] Fixed typo in bin/spark-sql
f12a0b1 [Cheng Lian] Worked arount SPARK-2678
daee105 [Cheng Lian] Fixed usage messages of all Spark SQL related scripts
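The key trick in this patch, per the commit log, is collecting arguments into bash arrays and re-parsing the final command line with eval so that quoting survives the hand-off to spark-submit. A minimal sketch of that quoting behavior follows; the show_args helper and the hiveconf value are hypothetical and not part of the patch:

```bash
#!/usr/bin/env bash
# Hypothetical demo of the eval technique: quote characters stored *inside*
# an array element only take effect if the assembled command line is
# re-parsed by the shell via eval.

show_args() {
  printf '%d args:' "$#"
  printf ' [%s]' "$@"
  echo
}

# The value carries literal quotes and an embedded space.
ARGS=(--hiveconf '"hive.exec.scratchdir=/tmp/scratch dir"')

show_args      ${ARGS[*]}   # quotes stay literal; the space splits the value in two
eval show_args ${ARGS[*]}   # eval re-parses the line; the value stays one word
```

Quoted "${ARGS[@]}" expansion is the other common way to preserve word boundaries in bash; the eval form in the sketch simply mirrors the approach the patch itself uses.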
Diffstat (limited to 'sbin/start-thriftserver.sh')
-rwxr-xr-x  sbin/start-thriftserver.sh  |  50
1 files changed, 46 insertions, 4 deletions
diff --git a/sbin/start-thriftserver.sh b/sbin/start-thriftserver.sh
index 8398e6f19b..603f50ae13 100755
--- a/sbin/start-thriftserver.sh
+++ b/sbin/start-thriftserver.sh
@@ -26,11 +26,53 @@ set -o posix
# Figure out where Spark is installed
FWDIR="$(cd `dirname $0`/..; pwd)"
-if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
- echo "Usage: ./sbin/start-thriftserver [options]"
+CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
+
+function usage {
+ echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"
+ pattern="usage"
+ pattern+="\|Spark assembly has been built with Hive"
+ pattern+="\|NOTE: SPARK_PREPEND_CLASSES is set"
+ pattern+="\|Spark Command: "
+ pattern+="\|======="
+ pattern+="\|--help"
+
$FWDIR/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
+ echo
+ echo "Thrift server options:"
+ $FWDIR/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+}
+
+function ensure_arg_number {
+ arg_number=$1
+ at_least=$2
+
+ if [[ $arg_number -lt $at_least ]]; then
+ usage
+ exit 1
+ fi
+}
+
+if [[ "$@" = --help ]] || [[ "$@" = -h ]]; then
+ usage
exit 0
fi
-CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
-exec "$FWDIR"/bin/spark-submit --class $CLASS spark-internal $@
+THRIFT_SERVER_ARGS=()
+SUBMISSION_ARGS=()
+
+while (($#)); do
+ case $1 in
+ --hiveconf)
+ ensure_arg_number $# 2
+ THRIFT_SERVER_ARGS+=($1); shift
+ THRIFT_SERVER_ARGS+=($1); shift
+ ;;
+
+ *)
+ SUBMISSION_ARGS+=($1); shift
+ ;;
+ esac
+done
+
+eval exec "$FWDIR"/bin/spark-submit --class $CLASS ${SUBMISSION_ARGS[*]} spark-internal ${THRIFT_SERVER_ARGS[*]}
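For reference, a sketch of how the new option-routing loop splits a typical invocation; the master URL and hiveconf setting below are illustrative examples, not values from the patch:

```bash
# Illustrative invocation: --hiveconf pairs land in THRIFT_SERVER_ARGS,
# everything else in SUBMISSION_ARGS.
./sbin/start-thriftserver.sh \
  --master spark://example.com:7077 \
  --hiveconf hive.server2.thrift.port=10001

# SUBMISSION_ARGS    = (--master spark://example.com:7077)
# THRIFT_SERVER_ARGS = (--hiveconf hive.server2.thrift.port=10001)
#
# The final eval exec line then runs, roughly:
#   bin/spark-submit --class org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 \
#     --master spark://example.com:7077 spark-internal \
#     --hiveconf hive.server2.thrift.port=10001
```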